diff --git a/.gitignore b/.gitignore index a5995309287..3faa6809c38 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,6 @@ bin/** coverage.html .idea/ .DS_Store -.glide .scBuildImage .init .generate* @@ -21,3 +20,7 @@ contrib/build/*/tmp/* .kube .var .vscode +vendor/github.com/petar/GoLLRB/doc/* +contrib/examples/consumer/Gopkg.lock +contrib/examples/consumer +contrib/examples/vendor/* diff --git a/.travis.yml b/.travis.yml index 09843934df4..0e0b6455431 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ services: - docker cache: directories: - - .glide + - $GOPATH/pkg/dep script: - make verify build build-integration build-e2e test images svcat deploy: diff --git a/Gopkg.lock b/Gopkg.lock new file mode 100644 index 00000000000..a2edfc15805 --- /dev/null +++ b/Gopkg.lock @@ -0,0 +1,1181 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + branch = "default" + name = "bitbucket.org/ww/goautoneg" + packages = ["."] + revision = "75cd24fc2f2c2a2088577d12123ddee5f54e0675" + +[[projects]] + name = "cloud.google.com/go" + packages = [ + "compute/metadata", + "internal" + ] + revision = "3b1ae45394a234c385be014e9a488f2bb6eef821" + +[[projects]] + name = "code.cloudfoundry.org/lager" + packages = ["."] + revision = "0bfa98e49e7a976af91e918d47978f07c00b081f" + +[[projects]] + name = "github.com/Azure/go-autorest" + packages = [ + "autorest", + "autorest/adal", + "autorest/azure", + "autorest/date" + ] + revision = "e14a70c556c8e0db173358d1a903dca345a8e75e" + version = "v9.1.0" + +[[projects]] + name = "github.com/NYTimes/gziphandler" + packages = ["."] + revision = "56545f4a5d46df9a6648819d1664c3a03a13ffdb" + +[[projects]] + name = "github.com/PuerkitoBio/purell" + packages = ["."] + revision = "8a290539e2e8629dbc4e6bad948158f790ec31f4" + version = "v1.0.0" + +[[projects]] + name = "github.com/PuerkitoBio/urlesc" + packages = ["."] + revision = 
"5bd2802263f21d8788851d5305584c82a5c75d7e" + +[[projects]] + branch = "master" + name = "github.com/bazelbuild/buildtools" + packages = [ + "build", + "tables" + ] + revision = "c20a867a9399dedaac6a523fb853022ff7e96f68" + +[[projects]] + name = "github.com/beorn7/perks" + packages = ["quantile"] + revision = "3ac7bf7a47d159a033b107610db8a1b6575507a4" + +[[projects]] + name = "github.com/cockroachdb/cmux" + packages = ["."] + revision = "112f0506e7743d64a6eb8fedbcff13d9979bbf92" + +[[projects]] + name = "github.com/coreos/bbolt" + packages = ["."] + revision = "32c383e75ce054674c53b5a07e55de85332aee14" + version = "v1.3.1-coreos.5" + +[[projects]] + name = "github.com/coreos/etcd" + packages = [ + "alarm", + "auth", + "auth/authpb", + "client", + "clientv3", + "clientv3/concurrency", + "compactor", + "discovery", + "embed", + "error", + "etcdserver", + "etcdserver/api", + "etcdserver/api/etcdhttp", + "etcdserver/api/v2http", + "etcdserver/api/v2http/httptypes", + "etcdserver/api/v3client", + "etcdserver/api/v3election", + "etcdserver/api/v3election/v3electionpb", + "etcdserver/api/v3election/v3electionpb/gw", + "etcdserver/api/v3lock", + "etcdserver/api/v3lock/v3lockpb", + "etcdserver/api/v3lock/v3lockpb/gw", + "etcdserver/api/v3rpc", + "etcdserver/api/v3rpc/rpctypes", + "etcdserver/auth", + "etcdserver/etcdserverpb", + "etcdserver/etcdserverpb/gw", + "etcdserver/membership", + "etcdserver/stats", + "lease", + "lease/leasehttp", + "lease/leasepb", + "mvcc", + "mvcc/backend", + "mvcc/mvccpb", + "pkg/adt", + "pkg/contention", + "pkg/cors", + "pkg/cpuutil", + "pkg/crc", + "pkg/debugutil", + "pkg/fileutil", + "pkg/httputil", + "pkg/idutil", + "pkg/ioutil", + "pkg/logutil", + "pkg/monotime", + "pkg/netutil", + "pkg/pathutil", + "pkg/pbutil", + "pkg/runtime", + "pkg/schedule", + "pkg/srv", + "pkg/tlsutil", + "pkg/transport", + "pkg/types", + "pkg/wait", + "proxy/grpcproxy/adapter", + "raft", + "raft/raftpb", + "rafthttp", + "snap", 
+ "snap/snappb", + "store", + "version", + "wal", + "wal/walpb" + ] + revision = "95a726a27e09030f9ccbd9982a1508f5a6d25ada" + version = "v3.2.13" + +[[projects]] + name = "github.com/coreos/go-semver" + packages = ["semver"] + revision = "568e959cd89871e61434c1143528d9162da89ef2" + +[[projects]] + name = "github.com/coreos/go-systemd" + packages = [ + "daemon", + "journal" + ] + revision = "48702e0da86bd25e76cfef347e2adeb434a0d0a6" + version = "v14" + +[[projects]] + name = "github.com/coreos/pkg" + packages = ["capnslog"] + revision = "fa29b1d70f0beaddd4c7021607cc3c3be8ce94b8" + +[[projects]] + name = "github.com/davecgh/go-spew" + packages = ["spew"] + revision = "782f4967f2dc4564575ca782fe2d04090b5faca8" + +[[projects]] + name = "github.com/dgrijalva/jwt-go" + packages = ["."] + revision = "01aeca54ebda6e0fbfafd0a524d234159c05ec20" + +[[projects]] + name = "github.com/elazarl/go-bindata-assetfs" + packages = ["."] + revision = "3dcc96556217539f50599357fb481ac0dc7439b9" + +[[projects]] + name = "github.com/emicklei/go-restful" + packages = [ + ".", + "log" + ] + revision = "ff4f55a206334ef123e4f79bbf348980da81ca46" + +[[projects]] + name = "github.com/emicklei/go-restful-swagger12" + packages = ["."] + revision = "dcef7f55730566d41eae5db10e7d6981829720f6" + version = "1.0.1" + +[[projects]] + branch = "master" + name = "github.com/evanphx/json-patch" + packages = ["."] + revision = "944e07253867aacae43c04b2e6a239005443f33a" + +[[projects]] + name = "github.com/fsnotify/fsnotify" + packages = ["."] + revision = "4da3e2cfbabc9f751898f250b49f2439785783a1" + +[[projects]] + name = "github.com/ghodss/yaml" + packages = ["."] + revision = "73d445a93680fa1a78ae23a5839bad48f32ba1ee" + +[[projects]] + name = "github.com/go-openapi/jsonpointer" + packages = ["."] + revision = "46af16f9f7b149af66e5d1bd010e3574dc06de98" + +[[projects]] + name = 
"github.com/go-openapi/jsonreference" + packages = ["."] + revision = "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + +[[projects]] + name = "github.com/go-openapi/spec" + packages = ["."] + revision = "7abd5745472fff5eb3685386d5fb8bf38683154d" + +[[projects]] + name = "github.com/go-openapi/swag" + packages = ["."] + revision = "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + +[[projects]] + name = "github.com/gogo/protobuf" + packages = [ + "proto", + "sortkeys" + ] + revision = "c0656edd0d9eab7c66d1eb0c568f9039345796f7" + +[[projects]] + name = "github.com/golang/glog" + packages = ["."] + revision = "44145f04b68cf362d9c4df2182967c2275eaefed" + +[[projects]] + name = "github.com/golang/groupcache" + packages = ["lru"] + revision = "02826c3e79038b59d737d3b1c0a1d937f71a4433" + +[[projects]] + name = "github.com/golang/protobuf" + packages = [ + "jsonpb", + "proto", + "protoc-gen-go/descriptor", + "ptypes", + "ptypes/any", + "ptypes/duration", + "ptypes/struct", + "ptypes/timestamp" + ] + revision = "1643683e1b54a9e88ad26d98f81400c8c9d9f4f9" + +[[projects]] + name = "github.com/google/btree" + packages = ["."] + revision = "7d79101e329e5a3adf994758c578dab82b90c017" + +[[projects]] + name = "github.com/google/gofuzz" + packages = ["."] + revision = "44d81051d367757e1c7c6a5a86423ece9afcf63c" + +[[projects]] + name = "github.com/googleapis/gnostic" + packages = [ + "OpenAPIv2", + "compiler", + "extensions" + ] + revision = "0c5108395e2debce0d731cf0287ddf7242066aba" + +[[projects]] + name = "github.com/gophercloud/gophercloud" + packages = [ + ".", + "openstack", + "openstack/identity/v2/tenants", + "openstack/identity/v2/tokens", + "openstack/identity/v3/tokens", + "openstack/utils", + "pagination" + ] + revision = "8183543f90d1aef267a5ecc209f2e0715b355acb" + +[[projects]] + name = "github.com/gorilla/context" + packages = ["."] + revision = 
"215affda49addc4c8ef7e2534915df2c8c35c6cd" + +[[projects]] + name = "github.com/gorilla/mux" + packages = ["."] + revision = "8096f47503459bcc74d1f4c487b7e6e42e5746b5" + +[[projects]] + name = "github.com/gregjones/httpcache" + packages = [ + ".", + "diskcache" + ] + revision = "787624de3eb7bd915c329cba748687a3b22666a6" + +[[projects]] + name = "github.com/grpc-ecosystem/go-grpc-prometheus" + packages = ["."] + revision = "6b7015e65d366bf3f19b2b2a000a831940f0f7e0" + version = "v1.1" + +[[projects]] + name = "github.com/grpc-ecosystem/grpc-gateway" + packages = [ + "runtime", + "runtime/internal", + "utilities" + ] + revision = "8cc3a55af3bcf171a1c23a90c4df9cf591706104" + version = "v1.3.0" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/errwrap" + packages = ["."] + revision = "7554cd9344cec97297fa6649b055a8c98c2a1e55" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/go-multierror" + packages = ["."] + revision = "b7773ae218740a7be65057fc60b366a49b538a44" + +[[projects]] + name = "github.com/hashicorp/golang-lru" + packages = [ + ".", + "simplelru" + ] + revision = "a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/hcl" + packages = [ + ".", + "hcl/ast", + "hcl/parser", + "hcl/scanner", + "hcl/strconv", + "hcl/token", + "json/parser", + "json/scanner", + "json/token" + ] + revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8" + +[[projects]] + branch = "master" + name = "github.com/howeyc/gopass" + packages = ["."] + revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8" + +[[projects]] + name = "github.com/imdario/mergo" + packages = ["."] + revision = "6633656539c1639d9d78127b7d47c622b5d7b6dc" + +[[projects]] + name = "github.com/inconshreveable/mousetrap" + packages = ["."] + revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" + version = "v1.0" + +[[projects]] + name = 
"github.com/jonboulle/clockwork" + packages = ["."] + revision = "72f9bd7c4e0c2a40055ab3d0f09654f730cce982" + +[[projects]] + name = "github.com/json-iterator/go" + packages = ["."] + revision = "36b14963da70d11297d313183d7e6388c8510e1e" + version = "1.0.0" + +[[projects]] + branch = "master" + name = "github.com/jteeuwen/go-bindata" + packages = [ + ".", + "go-bindata" + ] + revision = "a0ff2567cfb70903282db057e799fd826784d41d" + +[[projects]] + name = "github.com/juju/ratelimit" + packages = ["."] + revision = "5b9ff866471762aa2ab2dced63c9fb6f53921342" + version = "1.0" + +[[projects]] + branch = "master" + name = "github.com/kubernetes/repo-infra" + packages = ["kazel"] + revision = "2a736b4fba317cf3038e3cbd06899b544b875fae" + +[[projects]] + name = "github.com/magiconair/properties" + packages = ["."] + revision = "49d762b9817ba1c2e9d0c69183c2b4a8b8f1d934" + +[[projects]] + name = "github.com/mailru/easyjson" + packages = [ + "buffer", + "jlexer", + "jwriter" + ] + revision = "2f5df55504ebc322e4d52d34df6a1f5b503bf26d" + +[[projects]] + branch = "master" + name = "github.com/mattn/go-runewidth" + packages = ["."] + revision = "97311d9f7767e3d6f422ea06661bc2c7a19e8a5d" + +[[projects]] + name = "github.com/matttproud/golang_protobuf_extensions" + packages = ["pbutil"] + revision = "fc2b8d3a73c4867e51861bbdd5ae3c1f0869dd6a" + +[[projects]] + name = "github.com/mitchellh/mapstructure" + packages = ["."] + revision = "06020f85339e21b2478f756a78e295255ffa4d6a" + +[[projects]] + branch = "master" + name = "github.com/mxk/go-flowrate" + packages = ["flowrate"] + revision = "cca7078d478f8520f85629ad7c68962d31ed7682" + +[[projects]] + branch = "master" + name = "github.com/olekukonko/tablewriter" + packages = ["."] + revision = "96aac992fc8b1a4c83841a6c3e7178d20d989625" + +[[projects]] + name = "github.com/onsi/ginkgo" + packages = [ + ".", + "config", + 
"internal/codelocation", + "internal/containernode", + "internal/failer", + "internal/leafnodes", + "internal/remote", + "internal/spec", + "internal/spec_iterator", + "internal/specrunner", + "internal/suite", + "internal/testingtproxy", + "internal/writer", + "reporters", + "reporters/stenographer", + "reporters/stenographer/support/go-colorable", + "reporters/stenographer/support/go-isatty", + "types" + ] + revision = "11459a886d9cd66b319dac7ef1e917ee221372c9" + +[[projects]] + name = "github.com/onsi/gomega" + packages = [ + ".", + "format", + "internal/assertion", + "internal/asyncassertion", + "internal/oraclematcher", + "internal/testingtsupport", + "matchers", + "matchers/support/goraph/bipartitegraph", + "matchers/support/goraph/edge", + "matchers/support/goraph/node", + "matchers/support/goraph/util", + "types" + ] + revision = "dcabb60a477c2b6f456df65037cb6708210fbb02" + +[[projects]] + name = "github.com/pborman/uuid" + packages = ["."] + revision = "ca53cad383cad2479bbba7f7a1a05797ec1386e4" + +[[projects]] + name = "github.com/pelletier/go-toml" + packages = ["."] + revision = "0131db6d737cfbbfb678f8b7d92e55e27ce46224" + +[[projects]] + branch = "master" + name = "github.com/petar/GoLLRB" + packages = ["llrb"] + revision = "53be0d36a84c2a886ca057d34b6aa4468df9ccb4" + +[[projects]] + name = "github.com/peterbourgon/diskv" + packages = ["."] + revision = "5f041e8faa004a95c88a202771f4cc3e991971e6" + version = "v2.0.1" + +[[projects]] + branch = "master" + name = "github.com/pivotal-cf/brokerapi" + packages = [ + ".", + "auth" + ] + revision = "35946a0079bda144d0c9ed68df36899451f90209" + +[[projects]] + name = "github.com/pkg/errors" + packages = ["."] + revision = "a22138067af1c4942683050411a841ade67fe1eb" + +[[projects]] + name = "github.com/pmorie/go-open-service-broker-client" + packages = [ + "v2", + "v2/fake" + ] + revision = "31d8027f493f8f23f850415d171c7c52a972a6f2" + +[[projects]] + 
name = "github.com/prometheus/client_golang" + packages = [ + "prometheus", + "prometheus/promhttp" + ] + revision = "e7e903064f5e9eb5da98208bae10b475d4db0f8c" + +[[projects]] + name = "github.com/prometheus/client_model" + packages = ["go"] + revision = "fa8ad6fec33561be4280a8f0514318c79d7f6cb6" + +[[projects]] + name = "github.com/prometheus/common" + packages = [ + "expfmt", + "internal/bitbucket.org/ww/goautoneg", + "model" + ] + revision = "13ba4ddd0caa9c28ca7b7bffe1dfa9ed8d5ef207" + +[[projects]] + name = "github.com/prometheus/procfs" + packages = [ + ".", + "xfs" + ] + revision = "65c1f6f8f0fc1e2185eb9863a3bc751496404259" + +[[projects]] + name = "github.com/satori/go.uuid" + packages = ["."] + revision = "5bf94b69c6b68ee1b541973bb8e1144db23a194b" + +[[projects]] + name = "github.com/spf13/afero" + packages = [ + ".", + "mem" + ] + revision = "57afd63c68602b63ed976de00dd066ccb3c319db" + +[[projects]] + name = "github.com/spf13/cast" + packages = ["."] + revision = "acbeb36b902d72a7a4c18e8f3241075e7ab763e4" + version = "v1.1.0" + +[[projects]] + name = "github.com/spf13/cobra" + packages = ["."] + revision = "f62e98d28ab7ad31d707ba837a966378465c7b57" + +[[projects]] + branch = "master" + name = "github.com/spf13/jwalterweatherman" + packages = ["."] + revision = "7c0cea34c8ece3fbeb2b27ab9b59511d360fb394" + +[[projects]] + name = "github.com/spf13/pflag" + packages = ["."] + revision = "9ff6c6923cfffbcd502984b8e0c80539a94968b7" + +[[projects]] + name = "github.com/spf13/viper" + packages = ["."] + revision = "25b30aa063fc18e48662b86996252eabdcf2f0c7" + version = "v1.0.0" + +[[projects]] + name = "github.com/ugorji/go" + packages = ["codec"] + revision = "ded73eae5db7e7a0ef6f55aace87a2873c5d2b74" + +[[projects]] + name = "github.com/xiang90/probing" + packages = ["."] + revision = "07dd2e8dfe18522e9c447ba95f2fe95262f63bb2" + version = "0.0.1" + +[[projects]] 
+ name = "golang.org/x/crypto" + packages = [ + "bcrypt", + "blowfish", + "ssh/terminal" + ] + revision = "81e90905daefcd6fd217b62423c0908922eadb30" + +[[projects]] + name = "golang.org/x/net" + packages = [ + "context", + "context/ctxhttp", + "html", + "html/atom", + "html/charset", + "http2", + "http2/hpack", + "idna", + "internal/timeseries", + "lex/httplex", + "trace", + "websocket" + ] + revision = "1c05540f6879653db88113bc4a2b70aec4bd491f" + +[[projects]] + name = "golang.org/x/oauth2" + packages = [ + ".", + "google", + "internal", + "jws", + "jwt" + ] + revision = "a6bd8cefa1811bd24b86f8902872e4e8225f74c4" + +[[projects]] + name = "golang.org/x/sys" + packages = [ + "unix", + "windows" + ] + revision = "95c6576299259db960f6c5b9b69ea52422860fce" + +[[projects]] + name = "golang.org/x/text" + packages = [ + "cases", + "encoding", + "encoding/charmap", + "encoding/htmlindex", + "encoding/internal", + "encoding/internal/identifier", + "encoding/japanese", + "encoding/korean", + "encoding/simplifiedchinese", + "encoding/traditionalchinese", + "encoding/unicode", + "internal", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "internal/utf8internal", + "language", + "runes", + "secure/bidirule", + "secure/precis", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable", + "width" + ] + revision = "b19bf474d317b857955b12035d2c5acb57ce8b01" + +[[projects]] + name = "google.golang.org/appengine" + packages = [ + ".", + "internal", + "internal/app_identity", + "internal/base", + "internal/datastore", + "internal/log", + "internal/modules", + "internal/remote_api", + "internal/urlfetch", + "urlfetch" + ] + revision = "24e4144ec923c2374f6b06610c0df16a9222c3d9" + +[[projects]] + name = "google.golang.org/genproto" + packages = [ + "googleapis/api/annotations", + "googleapis/rpc/status" + ] + revision = "09f6ed296fc66555a25fe4ce95173148778dfa85" + +[[projects]] + name = "google.golang.org/grpc" + packages = 
[ + ".", + "balancer", + "codes", + "connectivity", + "credentials", + "grpclb/grpc_lb_v1/messages", + "grpclog", + "health/grpc_health_v1", + "internal", + "keepalive", + "metadata", + "naming", + "peer", + "resolver", + "stats", + "status", + "tap", + "transport" + ] + revision = "5b3c4e850e90a4cf6a20ebd46c8b32a0a3afcb9e" + version = "v1.7.5" + +[[projects]] + name = "gopkg.in/inf.v0" + packages = ["."] + revision = "3887ee99ecf07df5b447e9b00d9c0b2adaa9f3e4" + version = "v0.9.0" + +[[projects]] + name = "gopkg.in/natefinch/lumberjack.v2" + packages = ["."] + revision = "20b71e5b60d756d3d2f80def009790325acc2b23" + +[[projects]] + name = "gopkg.in/yaml.v2" + packages = ["."] + revision = "53feefa2559fb8dfa8d81baad31be332c97d6c77" + +[[projects]] + name = "k8s.io/api" + packages = [ + "admission/v1beta1", + "admissionregistration/v1alpha1", + "admissionregistration/v1beta1", + "apps/v1", + "apps/v1beta1", + "apps/v1beta2", + "authentication/v1", + "authentication/v1beta1", + "authorization/v1", + "authorization/v1beta1", + "autoscaling/v1", + "autoscaling/v2beta1", + "batch/v1", + "batch/v1beta1", + "batch/v2alpha1", + "certificates/v1beta1", + "core/v1", + "events/v1beta1", + "extensions/v1beta1", + "networking/v1", + "policy/v1beta1", + "rbac/v1", + "rbac/v1alpha1", + "rbac/v1beta1", + "scheduling/v1alpha1", + "settings/v1alpha1", + "storage/v1", + "storage/v1alpha1", + "storage/v1beta1" + ] + revision = "006a217681ae70cbacdd66a5e2fca1a61a8ff28e" + version = "kubernetes-1.9.1" + +[[projects]] + name = "k8s.io/apimachinery" + packages = [ + "pkg/api/equality", + "pkg/api/errors", + "pkg/api/meta", + "pkg/api/resource", + "pkg/api/testing", + "pkg/api/testing/fuzzer", + "pkg/api/testing/roundtrip", + "pkg/api/validation", + "pkg/api/validation/path", + "pkg/apimachinery", + "pkg/apimachinery/announced", + "pkg/apimachinery/registered", + "pkg/apis/meta/fuzzer", + "pkg/apis/meta/internalversion", + "pkg/apis/meta/v1", + "pkg/apis/meta/v1/unstructured", + 
"pkg/apis/meta/v1/validation", + "pkg/apis/meta/v1alpha1", + "pkg/conversion", + "pkg/conversion/queryparams", + "pkg/fields", + "pkg/labels", + "pkg/runtime", + "pkg/runtime/schema", + "pkg/runtime/serializer", + "pkg/runtime/serializer/json", + "pkg/runtime/serializer/protobuf", + "pkg/runtime/serializer/recognizer", + "pkg/runtime/serializer/streaming", + "pkg/runtime/serializer/versioning", + "pkg/selection", + "pkg/types", + "pkg/util/cache", + "pkg/util/clock", + "pkg/util/diff", + "pkg/util/errors", + "pkg/util/framer", + "pkg/util/httpstream", + "pkg/util/intstr", + "pkg/util/json", + "pkg/util/mergepatch", + "pkg/util/net", + "pkg/util/proxy", + "pkg/util/rand", + "pkg/util/runtime", + "pkg/util/sets", + "pkg/util/strategicpatch", + "pkg/util/uuid", + "pkg/util/validation", + "pkg/util/validation/field", + "pkg/util/wait", + "pkg/util/waitgroup", + "pkg/util/yaml", + "pkg/version", + "pkg/watch", + "third_party/forked/golang/json", + "third_party/forked/golang/netutil", + "third_party/forked/golang/reflect" + ] + revision = "68f9c3a1feb3140df59c67ced62d3a5df8e6c9c2" + version = "kubernetes-1.9.1" + +[[projects]] + name = "k8s.io/apiserver" + packages = [ + "pkg/admission", + "pkg/admission/configuration", + "pkg/admission/initializer", + "pkg/admission/metrics", + "pkg/admission/plugin/initialization", + "pkg/admission/plugin/namespace/lifecycle", + "pkg/admission/plugin/webhook/config", + "pkg/admission/plugin/webhook/config/apis/webhookadmission", + "pkg/admission/plugin/webhook/config/apis/webhookadmission/v1alpha1", + "pkg/admission/plugin/webhook/errors", + "pkg/admission/plugin/webhook/mutating", + "pkg/admission/plugin/webhook/namespace", + "pkg/admission/plugin/webhook/request", + "pkg/admission/plugin/webhook/rules", + "pkg/admission/plugin/webhook/validating", + "pkg/admission/plugin/webhook/versioned", + "pkg/apis/apiserver", + "pkg/apis/apiserver/install", + "pkg/apis/apiserver/v1alpha1", + "pkg/apis/audit", + "pkg/apis/audit/install", + 
"pkg/apis/audit/v1alpha1", + "pkg/apis/audit/v1beta1", + "pkg/apis/audit/validation", + "pkg/audit", + "pkg/audit/policy", + "pkg/authentication/authenticator", + "pkg/authentication/authenticatorfactory", + "pkg/authentication/group", + "pkg/authentication/request/anonymous", + "pkg/authentication/request/bearertoken", + "pkg/authentication/request/headerrequest", + "pkg/authentication/request/union", + "pkg/authentication/request/websocket", + "pkg/authentication/request/x509", + "pkg/authentication/serviceaccount", + "pkg/authentication/token/tokenfile", + "pkg/authentication/user", + "pkg/authorization/authorizer", + "pkg/authorization/authorizerfactory", + "pkg/authorization/union", + "pkg/endpoints", + "pkg/endpoints/discovery", + "pkg/endpoints/filters", + "pkg/endpoints/handlers", + "pkg/endpoints/handlers/negotiation", + "pkg/endpoints/handlers/responsewriters", + "pkg/endpoints/metrics", + "pkg/endpoints/openapi", + "pkg/endpoints/request", + "pkg/features", + "pkg/registry/generic", + "pkg/registry/generic/registry", + "pkg/registry/rest", + "pkg/server", + "pkg/server/filters", + "pkg/server/healthz", + "pkg/server/httplog", + "pkg/server/mux", + "pkg/server/options", + "pkg/server/routes", + "pkg/server/routes/data/swagger", + "pkg/server/storage", + "pkg/storage", + "pkg/storage/errors", + "pkg/storage/etcd", + "pkg/storage/etcd/metrics", + "pkg/storage/etcd/util", + "pkg/storage/etcd3", + "pkg/storage/etcd3/preflight", + "pkg/storage/names", + "pkg/storage/storagebackend", + "pkg/storage/storagebackend/factory", + "pkg/storage/value", + "pkg/util/feature", + "pkg/util/flag", + "pkg/util/flushwriter", + "pkg/util/logs", + "pkg/util/trace", + "pkg/util/webhook", + "pkg/util/wsstream", + "plugin/pkg/audit/log", + "plugin/pkg/audit/webhook", + "plugin/pkg/authenticator/token/webhook", + "plugin/pkg/authorizer/webhook" + ] + revision = "d0762227e2dd234c2db8efc3946c33ad2453c9e8" + version = "kubernetes-1.9.1" + +[[projects]] + name = "k8s.io/client-go" + 
packages = [ + "discovery", + "discovery/fake", + "informers", + "informers/admissionregistration", + "informers/admissionregistration/v1alpha1", + "informers/admissionregistration/v1beta1", + "informers/apps", + "informers/apps/v1", + "informers/apps/v1beta1", + "informers/apps/v1beta2", + "informers/autoscaling", + "informers/autoscaling/v1", + "informers/autoscaling/v2beta1", + "informers/batch", + "informers/batch/v1", + "informers/batch/v1beta1", + "informers/batch/v2alpha1", + "informers/certificates", + "informers/certificates/v1beta1", + "informers/core", + "informers/core/v1", + "informers/events", + "informers/events/v1beta1", + "informers/extensions", + "informers/extensions/v1beta1", + "informers/internalinterfaces", + "informers/networking", + "informers/networking/v1", + "informers/policy", + "informers/policy/v1beta1", + "informers/rbac", + "informers/rbac/v1", + "informers/rbac/v1alpha1", + "informers/rbac/v1beta1", + "informers/scheduling", + "informers/scheduling/v1alpha1", + "informers/settings", + "informers/settings/v1alpha1", + "informers/storage", + "informers/storage/v1", + "informers/storage/v1alpha1", + "informers/storage/v1beta1", + "kubernetes", + "kubernetes/fake", + "kubernetes/scheme", + "kubernetes/typed/admissionregistration/v1alpha1", + "kubernetes/typed/admissionregistration/v1alpha1/fake", + "kubernetes/typed/admissionregistration/v1beta1", + "kubernetes/typed/admissionregistration/v1beta1/fake", + "kubernetes/typed/apps/v1", + "kubernetes/typed/apps/v1/fake", + "kubernetes/typed/apps/v1beta1", + "kubernetes/typed/apps/v1beta1/fake", + "kubernetes/typed/apps/v1beta2", + "kubernetes/typed/apps/v1beta2/fake", + "kubernetes/typed/authentication/v1", + "kubernetes/typed/authentication/v1/fake", + "kubernetes/typed/authentication/v1beta1", + "kubernetes/typed/authentication/v1beta1/fake", + "kubernetes/typed/authorization/v1", + "kubernetes/typed/authorization/v1/fake", + "kubernetes/typed/authorization/v1beta1", + 
"kubernetes/typed/authorization/v1beta1/fake", + "kubernetes/typed/autoscaling/v1", + "kubernetes/typed/autoscaling/v1/fake", + "kubernetes/typed/autoscaling/v2beta1", + "kubernetes/typed/autoscaling/v2beta1/fake", + "kubernetes/typed/batch/v1", + "kubernetes/typed/batch/v1/fake", + "kubernetes/typed/batch/v1beta1", + "kubernetes/typed/batch/v1beta1/fake", + "kubernetes/typed/batch/v2alpha1", + "kubernetes/typed/batch/v2alpha1/fake", + "kubernetes/typed/certificates/v1beta1", + "kubernetes/typed/certificates/v1beta1/fake", + "kubernetes/typed/core/v1", + "kubernetes/typed/core/v1/fake", + "kubernetes/typed/events/v1beta1", + "kubernetes/typed/events/v1beta1/fake", + "kubernetes/typed/extensions/v1beta1", + "kubernetes/typed/extensions/v1beta1/fake", + "kubernetes/typed/networking/v1", + "kubernetes/typed/networking/v1/fake", + "kubernetes/typed/policy/v1beta1", + "kubernetes/typed/policy/v1beta1/fake", + "kubernetes/typed/rbac/v1", + "kubernetes/typed/rbac/v1/fake", + "kubernetes/typed/rbac/v1alpha1", + "kubernetes/typed/rbac/v1alpha1/fake", + "kubernetes/typed/rbac/v1beta1", + "kubernetes/typed/rbac/v1beta1/fake", + "kubernetes/typed/scheduling/v1alpha1", + "kubernetes/typed/scheduling/v1alpha1/fake", + "kubernetes/typed/settings/v1alpha1", + "kubernetes/typed/settings/v1alpha1/fake", + "kubernetes/typed/storage/v1", + "kubernetes/typed/storage/v1/fake", + "kubernetes/typed/storage/v1alpha1", + "kubernetes/typed/storage/v1alpha1/fake", + "kubernetes/typed/storage/v1beta1", + "kubernetes/typed/storage/v1beta1/fake", + "listers/admissionregistration/v1alpha1", + "listers/admissionregistration/v1beta1", + "listers/apps/v1", + "listers/apps/v1beta1", + "listers/apps/v1beta2", + "listers/autoscaling/v1", + "listers/autoscaling/v2beta1", + "listers/batch/v1", + "listers/batch/v1beta1", + "listers/batch/v2alpha1", + "listers/certificates/v1beta1", + "listers/core/v1", + "listers/events/v1beta1", + "listers/extensions/v1beta1", + "listers/networking/v1", + 
"listers/policy/v1beta1", + "listers/rbac/v1", + "listers/rbac/v1alpha1", + "listers/rbac/v1beta1", + "listers/scheduling/v1alpha1", + "listers/settings/v1alpha1", + "listers/storage/v1", + "listers/storage/v1alpha1", + "listers/storage/v1beta1", + "pkg/version", + "plugin/pkg/client/auth", + "plugin/pkg/client/auth/azure", + "plugin/pkg/client/auth/gcp", + "plugin/pkg/client/auth/oidc", + "plugin/pkg/client/auth/openstack", + "rest", + "rest/fake", + "rest/watch", + "testing", + "third_party/forked/golang/template", + "tools/auth", + "tools/cache", + "tools/clientcmd", + "tools/clientcmd/api", + "tools/clientcmd/api/latest", + "tools/clientcmd/api/v1", + "tools/leaderelection", + "tools/leaderelection/resourcelock", + "tools/metrics", + "tools/pager", + "tools/record", + "tools/reference", + "transport", + "util/buffer", + "util/cert", + "util/flowcontrol", + "util/homedir", + "util/integer", + "util/jsonpath", + "util/workqueue" + ] + revision = "9389c055a838d4f208b699b3c7c51b70f2368861" + version = "kubernetes-1.9.1" + +[[projects]] + name = "k8s.io/code-generator" + packages = [ + "cmd/client-gen", + "cmd/client-gen/args", + "cmd/client-gen/generators", + "cmd/client-gen/generators/fake", + "cmd/client-gen/generators/scheme", + "cmd/client-gen/generators/util", + "cmd/client-gen/path", + "cmd/client-gen/types", + "cmd/conversion-gen", + "cmd/conversion-gen/generators", + "cmd/deepcopy-gen", + "cmd/defaulter-gen", + "cmd/informer-gen", + "cmd/informer-gen/generators", + "cmd/lister-gen", + "cmd/lister-gen/generators", + "cmd/openapi-gen" + ] + revision = "91d3f6a57905178524105a085085901bb73bd3dc" + version = "kubernetes-1.9.1" + +[[projects]] + name = "k8s.io/gengo" + packages = [ + "args", + "examples/deepcopy-gen/generators", + "examples/defaulter-gen/generators", + "examples/set-gen/sets", + "generator", + "namer", + "parser", + "types" + ] + revision = "b58fc7edb82e0c6ffc9b8aef61813c7261b785d4" + +[[projects]] + name = "k8s.io/kube-openapi" + packages = [ + 
"pkg/builder", + "pkg/common", + "pkg/generators", + "pkg/handler", + "pkg/util", + "pkg/util/proto" + ] + revision = "39a7bf85c140f972372c2a0d1ee40adbf0c8bfe1" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "bb20b9c7565cc1ee61e78b379ad5593b79a162bbda8fe255f1c7110eda0ac3da" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml new file mode 100644 index 00000000000..2d57fb362a7 --- /dev/null +++ b/Gopkg.toml @@ -0,0 +1,66 @@ +# Tips for using dep: +# * Run dep ensure to sync your Gopkg.lock and vendor directory with your Gopkg.toml and code. +# * Gopkg.toml tells dep: +# * How to upgrade your dependencies. +# * How to resolve differences between the various versions that you vs. your dependencies require. +# * What versions downstream consumers of your code as a library should use. +# * You don't have to add every dependency that you use to Gopkg.toml, dep knows +# your dependencies from the imports in your code, and uses the lock to stick to a particular revision. +# * In case of merge conflicts, focus on fixing Gopkg.toml, and then re-run dep ensure to update Gopkg.lock. +# * Isolate changes to the vendor/ directory in a separate commit so it's easier to review your pull request. + + +# Force dep to vendor the code generators, which aren't imported just used at dev time. +# Picking a subpackage with Go code won't be necessary once https://github.com/golang/dep/issues/1306 is implemented. 
+required = [ + "github.com/jteeuwen/go-bindata/go-bindata", + "k8s.io/code-generator/cmd/defaulter-gen", + "k8s.io/code-generator/cmd/deepcopy-gen", + "k8s.io/code-generator/cmd/conversion-gen", + "k8s.io/code-generator/cmd/client-gen", + "k8s.io/code-generator/cmd/lister-gen", + "k8s.io/code-generator/cmd/informer-gen", + "k8s.io/code-generator/cmd/openapi-gen", + "k8s.io/gengo/args", + "github.com/kubernetes/repo-infra/kazel", +] + +# in sync with Kubernetes master (k8s 1.10 future release), but different from Kubernetes etcd version (3.1.10), +# but seems to have some improvements in memory consumption for embedded mode used in integration tests? +# (with v3.1.10 etcd pod crashes in Travis CI while we run tests) +# see https://github.com/kubernetes/kubernetes/blob/v1.9.1/Godeps/Godeps.json +[[constraint]] + name = "github.com/coreos/etcd" + version = "3.2.13" + +# We want Nov 4, 2014 version as the Jul 24, 2015 version (latest version) +# introduces bug documented in issue 1187 +# Using an override to not unnecessarily restrict downstream consumers who +# won't need this exact revision. 
+[[override]] + name = "github.com/golang/glog" + revision = "44145f04b68cf362d9c4df2182967c2275eaefed" + +[[constraint]] + name = "github.com/spf13/viper" + version = "~1.0.0" + +[[constraint]] + name = "k8s.io/api" + version = "kubernetes-1.9.1" + +[[constraint]] + name = "k8s.io/apimachinery" + version = "kubernetes-1.9.1" + +[[constraint]] + name = "k8s.io/apiserver" + version = "kubernetes-1.9.1" + +[[constraint]] + name = "k8s.io/client-go" + version = "kubernetes-1.9.1" + +[[constraint]] + name = "k8s.io/code-generator" + version = "kubernetes-1.9.1" diff --git a/Makefile b/Makefile index 71abb7f470b..8ae2d171f9a 100644 --- a/Makefile +++ b/Makefile @@ -81,12 +81,6 @@ SERVICE_CATALOG_MUTABLE_IMAGE = $(REGISTRY)service-catalog-$(ARCH):$(MUTABLE USER_BROKER_IMAGE = $(REGISTRY)user-broker-$(ARCH):$(VERSION) USER_BROKER_MUTABLE_IMAGE = $(REGISTRY)user-broker-$(ARCH):$(MUTABLE_TAG) -# precheck to avoid kubernetes-incubator/service-catalog#361 -$(if $(realpath vendor/k8s.io/apimachinery/vendor), \ - $(error the vendor directory exists in the apimachinery \ - vendored source and must be flattened. \ - run 'glide i -v')) - ifdef UNIT_TESTS UNIT_TEST_FLAGS=-run $(UNIT_TESTS) -v endif @@ -111,6 +105,7 @@ else scBuildImageTarget = .scBuildImage endif +# Even though we migrated to dep, it doesn't replace the `glide nv` command NON_VENDOR_DIRS = $(shell $(DOCKER_CMD) glide nv) # This section builds the output binaries. @@ -189,7 +184,7 @@ $(BINDIR)/e2e.test: .init $(NEWEST_E2ETEST_SOURCE) $(NEWEST_GO_FILE) # Util targets ############## .PHONY: verify verify-generated verify-client-gen -verify: .init .generate_files verify-generated verify-client-gen +verify: .init .generate_files verify-generated verify-client-gen verify-vendor @echo Running gofmt: @$(DOCKER_CMD) gofmt -l -s $(TOP_TEST_DIRS) $(TOP_SRC_DIRS)>.out 2>&1||true @[ ! 
-s .out ] || \ @@ -212,12 +207,12 @@ verify: .init .generate_files verify-generated verify-client-gen @# $(DOCKER_CMD) go vet $(NON_VENDOR_DIRS) @echo Running repo-infra verify scripts - @$(DOCKER_CMD) vendor/github.com/kubernetes/repo-infra/verify/verify-boilerplate.sh --rootdir=. | grep -v generated > .out 2>&1 || true + @$(DOCKER_CMD) vendor/github.com/kubernetes/repo-infra/verify/verify-boilerplate.sh --rootdir=. | grep -v generated | grep -v .pkg > .out 2>&1 || true @[ ! -s .out ] || (cat .out && rm .out && false) @rm .out @# @echo Running href checker$(SKIP_COMMENT): - @$(DOCKER_CMD) verify-links.sh -t $(SKIP_HTTP) . + @$(DOCKER_CMD) verify-links.sh -s .pkg -t $(SKIP_HTTP) . @echo Running errexit checker: @$(DOCKER_CMD) build/verify-errexit.sh @echo Running tag verification: @@ -236,7 +231,7 @@ coverage: .init $(DOCKER_CMD) contrib/hack/coverage.sh --html "$(COVERAGE)" \ $(addprefix ./,$(TEST_DIRS)) -test: .init build test-unit test-integration +test: .init build test-unit test-integration test-dep # this target checks to see if the go binary is installed on the host .PHONY: check-go @@ -246,7 +241,7 @@ check-go: exit 1; \ fi -# this target uses the host-local go installation to test +# this target uses the host-local go installation to test .PHONY: test-unit-native test-unit-native: check-go go test $(addprefix ${SC_PKG}/,${TEST_DIRS}) @@ -382,3 +377,13 @@ release-push-%: svcat: $(BINDIR)/svcat $(BINDIR)/svcat: .init .generate_files cmd/svcat/main.go $(DOCKER_CMD) $(GO_BUILD) -o $@ $(SC_PKG)/cmd/svcat + +# Dependency management via dep (https://golang.github.io/dep) +.PHONY: verify-vendor test-dep +verify-vendor: .init + # Verify that vendor/ is in sync with Gopkg.lock + $(DOCKER_CMD) $(BUILD_DIR)/verify-vendor.sh + +test-dep: .init + # Test that a downstream consumer of our client library can use dep + $(DOCKER_CMD) test/test-dep.sh diff --git a/build/build-image/Dockerfile b/build/build-image/Dockerfile index 00e439c6deb..1e391919efa 100644 
--- a/build/build-image/Dockerfile +++ b/build/build-image/Dockerfile @@ -23,6 +23,11 @@ ENV GLIDE_VERSION=v0.12.3 \ RUN curl -sSL https://github.com/Masterminds/glide/releases/download/$GLIDE_VERSION/glide-$GLIDE_VERSION-linux-amd64.tar.gz \ | tar -vxz -C /usr/local/bin --strip=1 +# Install dep as root +ENV DEP_VERSION=v0.3.2 +RUN curl -sSL -o /usr/local/bin/dep https://github.com/golang/dep/releases/download/$DEP_VERSION/dep-linux-amd64 && \ + chmod +x /usr/local/bin/dep + # Install etcd RUN curl -sSL https://github.com/coreos/etcd/releases/download/v3.1.10/etcd-v3.1.10-linux-amd64.tar.gz \ | tar -vxz -C /usr/local/bin --strip=1 etcd-v3.1.10-linux-amd64/etcd diff --git a/build/verify-errexit.sh b/build/verify-errexit.sh index 742a1c229ed..b5a4784917d 100755 --- a/build/verify-errexit.sh +++ b/build/verify-errexit.sh @@ -28,7 +28,7 @@ REPO_ROOT=$(dirname "${BASH_SOURCE}")/.. if [ "$*" != "" ]; then args="$*" else - args=$(ls "$REPO_ROOT" | grep -v vendor | grep -v glide) + args=$(ls "$REPO_ROOT" | grep -v vendor | grep -v Gopkg) fi # Gather the list of files that appear to be shell scripts. diff --git a/build/verify-vendor.sh b/build/verify-vendor.sh new file mode 100755 index 00000000000..eb317e597a1 --- /dev/null +++ b/build/verify-vendor.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -o errexit +set -o nounset +set -o pipefail + +dep ensure --vendor-only +if [[ -n "$(git status --porcelain vendor)" ]]; then + echo 'vendor/ is out-of-date: run `dep ensure --vendor-only` and then check in the changes' + git status --porcelain vendor + dep version + exit 1 +fi + +echo "Verified that vendor/ is in sync with Gopkg.lock" diff --git a/contrib/examples/consumer/Gopkg.toml b/contrib/examples/consumer/Gopkg.toml new file mode 100644 index 00000000000..be84ffac28a --- /dev/null +++ b/contrib/examples/consumer/Gopkg.toml @@ -0,0 +1,3 @@ +[[constraint]] + branch = "master" + name = "github.com/kubernetes-incubator/service-catalog" diff --git a/contrib/examples/consumer/main.go b/contrib/examples/consumer/main.go new file mode 100644 index 00000000000..47b2b018ffe --- /dev/null +++ b/contrib/examples/consumer/main.go @@ -0,0 +1,30 @@ +/* +Copyright 2018 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "fmt" + "github.com/kubernetes-incubator/service-catalog/pkg/svcat" +) + +func main() { + a, _ := svcat.NewApp("", "") + brokers, _ := a.RetrieveBrokers() + for _, b := range brokers { + fmt.Println(b.Name) + } +} diff --git a/contrib/jenkins/init_build.sh b/contrib/jenkins/init_build.sh index b1d0c1b169d..95742d53c75 100755 --- a/contrib/jenkins/init_build.sh +++ b/contrib/jenkins/init_build.sh @@ -22,6 +22,7 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
&& pwd)" GO_VERSION='1.9' HELM_VERSION='v2.7.0' GLIDE_VERSION='v0.12.3' +DEP_VERSION='v0.3.2' function update-golang() { # Check version of golang @@ -92,10 +93,39 @@ function update-glide() { } +function update-dep() { + # Check version of dep + local current="$(dep --version 2>/dev/null || echo "unknown")" + + # dep version prints its output in the format: + # dep: + # version : v0.3.2 + # build date : + # git hash : + # go version : go1.9.2 + # go compiler : gc + # platform : darwin/amd64 + # To isolate the version string, we include the leading space + # in the comparison, and omit the trailing wildcard. + if [[ "${current}" == *" ${DEP_VERSION}"* ]]; then + echo "dep is up-to-date: ${current}" + else + echo "Upgrading dep ${current} to ${DEP_VERSION}" + + # Install new dep. + local dep_url='https://github.com/golang/dep/releases/download/' + dep_url+="${DEP_VERSION}/dep-linux-amd64" + + curl -sSL -o /usr/local/bin/dep "${dep_url}" && chmod +x /usr/local/bin/dep \ + || { echo "Cannot upgrade dep to ${DEP_VERSION}"; return 1; } + fi +} + function main() { update-golang || error_exit 'Failed to update golang' update-helm || error_exit 'Failed to update helm' update-glide || error_exit 'Failed to update glide' + update-dep || error_exit 'Failed to update dep' } main diff --git a/docs/devguide.md b/docs/devguide.md index 16cbbb51531..5af9a405918 100644 --- a/docs/devguide.md +++ b/docs/devguide.md @@ -18,7 +18,6 @@ have found possible (or practical). Below is a summary of the repository's layout: . 
- ├── .glide # Glide cache (untracked) ├── bin # Destination for binaries compiled for linux/amd64 (untracked) ├── build # Contains build-related scripts and subdirectories containing Dockerfiles ├── charts # Helm charts for deployment @@ -40,7 +39,9 @@ layout: ├── pkg # Contains all non-"main" Go packages ├── plugin # Plugins for API server ├── test # Integration and e2e tests - └── vendor # Glide-managed dependencies + ├── vendor # dep-managed dependencies + ├── Gopkg.toml # dep manifest + └── Gopkg.lock # dep lock (autogenerated, do not edit) ## Working on Issues @@ -150,7 +151,7 @@ To build the service-catalog: The above will build all executables and place them in the `bin` directory. This is done within a Docker container-- meaning you do not need to have all of the -necessary tooling installed on your host (such as a golang compiler or glide). +necessary tooling installed on your host (such as a golang compiler or dep). Building outside the container is possible, but not officially supported. Note, this will do the basic build of the service catalog. 
There are more diff --git a/glide.lock b/glide.lock deleted file mode 100644 index c4888b71359..00000000000 --- a/glide.lock +++ /dev/null @@ -1,813 +0,0 @@ -hash: 718db7feec8771e9177c8a68fdb30c6fba1ed2c14610934eaed99061884a3ac9 -updated: 2018-01-18T10:00:00.000000-06:00 -imports: -- name: bitbucket.org/ww/goautoneg - version: 75cd24fc2f2c2a2088577d12123ddee5f54e0675 -- name: cloud.google.com/go - version: 3b1ae45394a234c385be014e9a488f2bb6eef821 - subpackages: - - compute/metadata - - internal -- name: code.cloudfoundry.org/lager - version: dfcbcba2dd4a5228c43b0292d219d5c010daed3a -- name: github.com/Azure/go-autorest - version: e14a70c556c8e0db173358d1a903dca345a8e75e - subpackages: - - autorest - - autorest/adal - - autorest/azure - - autorest/date -- name: github.com/beorn7/perks - version: 3ac7bf7a47d159a033b107610db8a1b6575507a4 - subpackages: - - quantile -- name: github.com/coreos/etcd - version: 95a726a27e09030f9ccbd9982a1508f5a6d25ada - subpackages: - - alarm - - auth - - auth/authpb - - client - - clientv3 - - clientv3/concurrency - - compactor - - discovery - - embed - - error - - etcdserver - - etcdserver/api - - etcdserver/api/etcdhttp - - etcdserver/api/v2http - - etcdserver/api/v2http/httptypes - - etcdserver/api/v3client - - etcdserver/api/v3election - - etcdserver/api/v3election/v3electionpb - - etcdserver/api/v3election/v3electionpb/gw - - etcdserver/api/v3lock - - etcdserver/api/v3lock/v3lockpb - - etcdserver/api/v3lock/v3lockpb/gw - - etcdserver/api/v3rpc - - etcdserver/api/v3rpc/rpctypes - - etcdserver/auth - - etcdserver/etcdserverpb - - etcdserver/etcdserverpb/gw - - etcdserver/membership - - etcdserver/stats - - lease - - lease/leasehttp - - lease/leasepb - - mvcc - - mvcc/backend - - mvcc/mvccpb - - pkg/adt - - pkg/contention - - pkg/cors - - pkg/cpuutil - - pkg/crc - - pkg/debugutil - - pkg/fileutil - - pkg/httputil - - pkg/idutil - - pkg/ioutil - - pkg/logutil - - pkg/monotime - - pkg/netutil - - pkg/pathutil - - 
pkg/pbutil - - pkg/runtime - - pkg/schedule - - pkg/srv - - pkg/tlsutil - - pkg/transport - - pkg/types - - pkg/wait - - proxy/grpcproxy/adapter - - raft - - raft/raftpb - - rafthttp - - snap - - snap/snappb - - store - - version - - wal - - wal/walpb -- name: github.com/coreos/go-semver - version: 568e959cd89871e61434c1143528d9162da89ef2 - subpackages: - - semver -- name: github.com/coreos/go-systemd - version: 48702e0da86bd25e76cfef347e2adeb434a0d0a6 - subpackages: - - daemon - - journal -- name: github.com/davecgh/go-spew - version: 782f4967f2dc4564575ca782fe2d04090b5faca8 - subpackages: - - spew -- name: github.com/dgrijalva/jwt-go - version: 01aeca54ebda6e0fbfafd0a524d234159c05ec20 -- name: github.com/elazarl/go-bindata-assetfs - version: 3dcc96556217539f50599357fb481ac0dc7439b9 -- name: github.com/emicklei/go-restful - version: ff4f55a206334ef123e4f79bbf348980da81ca46 - subpackages: - - log - - swagger -- name: github.com/emicklei/go-restful-swagger12 - version: dcef7f55730566d41eae5db10e7d6981829720f6 -- name: github.com/evanphx/json-patch - version: 944e07253867aacae43c04b2e6a239005443f33a -- name: github.com/fsnotify/fsnotify - version: 4da3e2cfbabc9f751898f250b49f2439785783a1 -- name: github.com/ghodss/yaml - version: 73d445a93680fa1a78ae23a5839bad48f32ba1ee -- name: github.com/go-openapi/jsonpointer - version: 46af16f9f7b149af66e5d1bd010e3574dc06de98 -- name: github.com/go-openapi/jsonreference - version: 13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272 -- name: github.com/go-openapi/spec - version: 7abd5745472fff5eb3685386d5fb8bf38683154d -- name: github.com/go-openapi/swag - version: f3f9494671f93fcff853e3c6e9e948b3eb71e590 -- name: github.com/gogo/protobuf - version: c0656edd0d9eab7c66d1eb0c568f9039345796f7 - subpackages: - - proto - - sortkeys -- name: github.com/golang/glog - version: 44145f04b68cf362d9c4df2182967c2275eaefed -- 
name: github.com/golang/groupcache - version: 02826c3e79038b59d737d3b1c0a1d937f71a4433 - subpackages: - - lru -- name: github.com/golang/protobuf - version: 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 - subpackages: - - jsonpb - - proto - - protoc-gen-go/descriptor - - ptypes - - ptypes/any - - ptypes/duration - - ptypes/struct - - ptypes/timestamp -- name: github.com/google/btree - version: 7d79101e329e5a3adf994758c578dab82b90c017 -- name: github.com/google/gofuzz - version: 44d81051d367757e1c7c6a5a86423ece9afcf63c -- name: github.com/googleapis/gnostic - version: 0c5108395e2debce0d731cf0287ddf7242066aba - subpackages: - - OpenAPIv2 - - compiler - - extensions -- name: github.com/gophercloud/gophercloud - version: 8183543f90d1aef267a5ecc209f2e0715b355acb - subpackages: - - openstack - - openstack/identity/v2/tenants - - openstack/identity/v2/tokens - - openstack/identity/v3/tokens - - openstack/utils - - pagination -- name: github.com/gorilla/context - version: 215affda49addc4c8ef7e2534915df2c8c35c6cd -- name: github.com/gorilla/mux - version: 8096f47503459bcc74d1f4c487b7e6e42e5746b5 -- name: github.com/gregjones/httpcache - version: 787624de3eb7bd915c329cba748687a3b22666a6 - subpackages: - - diskcache -- name: github.com/hashicorp/errwrap - version: 7554cd9344cec97297fa6649b055a8c98c2a1e55 -- name: github.com/hashicorp/go-multierror - version: b7773ae218740a7be65057fc60b366a49b538a44 -- name: github.com/grpc-ecosystem/go-grpc-prometheus - version: 6b7015e65d366bf3f19b2b2a000a831940f0f7e0 -- name: github.com/grpc-ecosystem/grpc-gateway - version: 8cc3a55af3bcf171a1c23a90c4df9cf591706104 - subpackages: - - runtime - - runtime/internal - - utilities -- name: github.com/hashicorp/golang-lru - version: a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4 - subpackages: - - simplelru -- name: github.com/hashicorp/hcl - version: 
23c074d0eceb2b8a5bfdbb271ab780cde70f05a8 - subpackages: - - hcl/ast - - hcl/parser - - hcl/scanner - - hcl/strconv - - hcl/token - - json/parser - - json/scanner - - json/token -- name: github.com/howeyc/gopass - version: bf9dde6d0d2c004a008c27aaee91170c786f6db8 -- name: github.com/imdario/mergo - version: 6633656539c1639d9d78127b7d47c622b5d7b6dc -- name: github.com/inconshreveable/mousetrap - version: 76626ae9c91c4f2a10f34cad8ce83ea42c93bb75 -- name: github.com/json-iterator/go - version: 36b14963da70d11297d313183d7e6388c8510e1e -- name: github.com/jteeuwen/go-bindata - version: a0ff2567cfb70903282db057e799fd826784d41d -- name: github.com/juju/ratelimit - version: 5b9ff866471762aa2ab2dced63c9fb6f53921342 -- name: github.com/kubernetes/repo-infra - version: 2d2eb5e12b4663fc4d764b5db9daab39334d3f37 -- name: github.com/magiconair/properties - version: 49d762b9817ba1c2e9d0c69183c2b4a8b8f1d934 -- name: github.com/mailru/easyjson - version: 2f5df55504ebc322e4d52d34df6a1f5b503bf26d - subpackages: - - buffer - - jlexer - - jwriter -- name: github.com/mattn/go-runewidth - version: 97311d9f7767e3d6f422ea06661bc2c7a19e8a5d -- name: github.com/matttproud/golang_protobuf_extensions - version: fc2b8d3a73c4867e51861bbdd5ae3c1f0869dd6a - subpackages: - - pbutil -- name: github.com/mitchellh/mapstructure - version: 06020f85339e21b2478f756a78e295255ffa4d6a -- name: github.com/mxk/go-flowrate - version: cca7078d478f8520f85629ad7c68962d31ed7682 - subpackages: - - flowrate -- name: github.com/NYTimes/gziphandler - version: 56545f4a5d46df9a6648819d1664c3a03a13ffdb -- name: github.com/olekukonko/tablewriter - version: 96aac992fc8b1a4c83841a6c3e7178d20d989625 -- name: github.com/onsi/ginkgo - version: 11459a886d9cd66b319dac7ef1e917ee221372c9 - subpackages: - - config - - internal/codelocation - - internal/containernode - - internal/failer - - internal/leafnodes 
- - internal/remote - - internal/spec - - internal/spec_iterator - - internal/specrunner - - internal/suite - - internal/testingtproxy - - internal/writer - - reporters - - reporters/stenographer - - reporters/stenographer/support/go-colorable - - reporters/stenographer/support/go-isatty - - types -- name: github.com/onsi/gomega - version: dcabb60a477c2b6f456df65037cb6708210fbb02 - subpackages: - - format - - internal/assertion - - internal/asyncassertion - - internal/oraclematcher - - internal/testingtsupport - - matchers - - matchers/support/goraph/bipartitegraph - - matchers/support/goraph/edge - - matchers/support/goraph/node - - matchers/support/goraph/util - - types -- name: github.com/pborman/uuid - version: ca53cad383cad2479bbba7f7a1a05797ec1386e4 -- name: github.com/pelletier/go-toml - version: 0131db6d737cfbbfb678f8b7d92e55e27ce46224 -- name: github.com/peterbourgon/diskv - version: 5f041e8faa004a95c88a202771f4cc3e991971e6 -- name: github.com/pivotal-cf/brokerapi - version: 35946a0079bda144d0c9ed68df36899451f90209 - subpackages: - - auth -- name: github.com/pkg/errors - version: a22138067af1c4942683050411a841ade67fe1eb -- name: github.com/pmorie/go-open-service-broker-client - version: 31d8027f493f8f23f850415d171c7c52a972a6f2 - subpackages: - - v2 - - v2/fake -- name: github.com/prometheus/client_golang - version: e7e903064f5e9eb5da98208bae10b475d4db0f8c - subpackages: - - prometheus - - prometheus/promhttp -- name: github.com/prometheus/client_model - version: fa8ad6fec33561be4280a8f0514318c79d7f6cb6 - subpackages: - - go -- name: github.com/prometheus/common - version: 13ba4ddd0caa9c28ca7b7bffe1dfa9ed8d5ef207 - subpackages: - - expfmt - - internal/bitbucket.org/ww/goautoneg - - model -- name: github.com/prometheus/procfs - version: 65c1f6f8f0fc1e2185eb9863a3bc751496404259 - subpackages: - - xfs -- name: github.com/PuerkitoBio/purell - version: 
8a290539e2e8629dbc4e6bad948158f790ec31f4 -- name: github.com/PuerkitoBio/urlesc - version: 5bd2802263f21d8788851d5305584c82a5c75d7e -- name: github.com/satori/go.uuid - version: 5bf94b69c6b68ee1b541973bb8e1144db23a194b -- name: github.com/spf13/afero - version: 57afd63c68602b63ed976de00dd066ccb3c319db - subpackages: - - mem -- name: github.com/spf13/cast - version: acbeb36b902d72a7a4c18e8f3241075e7ab763e4 -- name: github.com/spf13/cobra - version: f62e98d28ab7ad31d707ba837a966378465c7b57 -- name: github.com/spf13/jwalterweatherman - version: 7c0cea34c8ece3fbeb2b27ab9b59511d360fb394 -- name: github.com/spf13/pflag - version: 9ff6c6923cfffbcd502984b8e0c80539a94968b7 -- name: github.com/spf13/viper - version: 25b30aa063fc18e48662b86996252eabdcf2f0c7 -- name: github.com/ugorji/go - version: ded73eae5db7e7a0ef6f55aace87a2873c5d2b74 - subpackages: - - codec -- name: golang.org/x/crypto - version: 81e90905daefcd6fd217b62423c0908922eadb30 - subpackages: - - bcrypt - - blowfish - - nacl/secretbox - - poly1305 - - salsa20/salsa - - ssh/terminal -- name: golang.org/x/net - version: 1c05540f6879653db88113bc4a2b70aec4bd491f - subpackages: - - context - - context/ctxhttp - - html - - html/atom - - html/charset - - http2 - - http2/hpack - - idna - - internal/timeseries - - lex/httplex - - trace - - websocket -- name: golang.org/x/oauth2 - version: a6bd8cefa1811bd24b86f8902872e4e8225f74c4 - subpackages: - - google - - internal - - jws - - jwt -- name: golang.org/x/sys - version: 95c6576299259db960f6c5b9b69ea52422860fce - subpackages: - - unix - - windows -- name: golang.org/x/text - version: b19bf474d317b857955b12035d2c5acb57ce8b01 - subpackages: - - cases - - encoding - - encoding/charmap - - encoding/htmlindex - - encoding/internal - - encoding/internal/identifier - - encoding/japanese - - encoding/korean - - encoding/simplifiedchinese - - encoding/traditionalchinese - - encoding/unicode - - internal - - 
internal/tag - - internal/utf8internal - - language - - runes - - secure/bidirule - - secure/precis - - transform - - unicode/bidi - - unicode/norm - - width -- name: google.golang.org/appengine - version: 24e4144ec923c2374f6b06610c0df16a9222c3d9 - subpackages: - - internal - - internal/app_identity - - internal/base - - internal/datastore - - internal/log - - internal/modules - - internal/remote_api - - internal/urlfetch - - urlfetch -- name: google.golang.org/genproto - version: 09f6ed296fc66555a25fe4ce95173148778dfa85 - subpackages: - - googleapis/api/annotations - - googleapis/rpc/status -- name: google.golang.org/grpc - version: 5b3c4e850e90a4cf6a20ebd46c8b32a0a3afcb9e - subpackages: - - balancer - - codes - - connectivity - - credentials - - grpclb/grpc_lb_v1/messages - - grpclog - - health/grpc_health_v1 - - internal - - keepalive - - metadata - - naming - - peer - - resolver - - stats - - status - - tap - - transport -- name: gopkg.in/inf.v0 - version: 3887ee99ecf07df5b447e9b00d9c0b2adaa9f3e4 -- name: gopkg.in/natefinch/lumberjack.v2 - version: 20b71e5b60d756d3d2f80def009790325acc2b23 -- name: gopkg.in/yaml.v2 - version: 53feefa2559fb8dfa8d81baad31be332c97d6c77 -- name: k8s.io/api - version: 006a217681ae70cbacdd66a5e2fca1a61a8ff28e - subpackages: - - admission/v1beta1 - - admissionregistration/v1alpha1 - - admissionregistration/v1beta1 - - apps/v1 - - apps/v1beta1 - - apps/v1beta2 - - authentication/v1 - - authentication/v1beta1 - - authorization/v1 - - authorization/v1beta1 - - autoscaling/v1 - - autoscaling/v2beta1 - - batch/v1 - - batch/v1beta1 - - batch/v2alpha1 - - certificates/v1beta1 - - core/v1 - - events/v1beta1 - - extensions/v1beta1 - - networking/v1 - - policy/v1beta1 - - rbac/v1 - - rbac/v1alpha1 - - rbac/v1beta1 - - scheduling/v1alpha1 - - settings/v1alpha1 - - storage/v1 - - storage/v1alpha1 - - storage/v1beta1 -- name: k8s.io/apimachinery - version: 68f9c3a1feb3140df59c67ced62d3a5df8e6c9c2 - subpackages: - - pkg/api/equality - - 
pkg/api/errors - - pkg/api/meta - - pkg/api/resource - - pkg/api/testing - - pkg/api/testing/fuzzer - - pkg/api/testing/roundtrip - - pkg/api/validation - - pkg/api/validation/path - - pkg/apimachinery - - pkg/apimachinery/announced - - pkg/apimachinery/registered - - pkg/apis/meta/fuzzer - - pkg/apis/meta/internalversion - - pkg/apis/meta/v1 - - pkg/apis/meta/v1/unstructured - - pkg/apis/meta/v1/validation - - pkg/apis/meta/v1alpha1 - - pkg/conversion - - pkg/conversion/queryparams - - pkg/fields - - pkg/labels - - pkg/runtime - - pkg/runtime/schema - - pkg/runtime/serializer - - pkg/runtime/serializer/json - - pkg/runtime/serializer/protobuf - - pkg/runtime/serializer/recognizer - - pkg/runtime/serializer/streaming - - pkg/runtime/serializer/versioning - - pkg/selection - - pkg/types - - pkg/util/cache - - pkg/util/clock - - pkg/util/diff - - pkg/util/errors - - pkg/util/framer - - pkg/util/httpstream - - pkg/util/intstr - - pkg/util/json - - pkg/util/mergepatch - - pkg/util/net - - pkg/util/proxy - - pkg/util/rand - - pkg/util/runtime - - pkg/util/sets - - pkg/util/strategicpatch - - pkg/util/uuid - - pkg/util/validation - - pkg/util/validation/field - - pkg/util/wait - - pkg/util/waitgroup - - pkg/util/yaml - - pkg/version - - pkg/watch - - third_party/forked/golang/json - - third_party/forked/golang/netutil - - third_party/forked/golang/reflect -- name: k8s.io/apiserver - version: d0762227e2dd234c2db8efc3946c33ad2453c9e8 - subpackages: - - pkg/admission - - pkg/admission/configuration - - pkg/admission/initializer - - pkg/admission/metrics - - pkg/admission/plugin/initialization - - pkg/admission/plugin/namespace/lifecycle - - pkg/admission/plugin/webhook/config - - pkg/admission/plugin/webhook/config/apis/webhookadmission - - pkg/admission/plugin/webhook/config/apis/webhookadmission/v1alpha1 - - pkg/admission/plugin/webhook/errors - - pkg/admission/plugin/webhook/mutating - - pkg/admission/plugin/webhook/namespace - - pkg/admission/plugin/webhook/request - - 
pkg/admission/plugin/webhook/rules - - pkg/admission/plugin/webhook/validating - - pkg/admission/plugin/webhook/versioned - - pkg/apis/apiserver - - pkg/apis/apiserver/install - - pkg/apis/apiserver/v1alpha1 - - pkg/apis/audit - - pkg/apis/audit/install - - pkg/apis/audit/v1alpha1 - - pkg/apis/audit/v1beta1 - - pkg/apis/audit/validation - - pkg/audit - - pkg/audit/policy - - pkg/authentication/authenticator - - pkg/authentication/authenticatorfactory - - pkg/authentication/group - - pkg/authentication/request/anonymous - - pkg/authentication/request/bearertoken - - pkg/authentication/request/headerrequest - - pkg/authentication/request/union - - pkg/authentication/request/websocket - - pkg/authentication/request/x509 - - pkg/authentication/serviceaccount - - pkg/authentication/token/tokenfile - - pkg/authentication/user - - pkg/authorization/authorizer - - pkg/authorization/authorizerfactory - - pkg/authorization/union - - pkg/endpoints - - pkg/endpoints/discovery - - pkg/endpoints/filters - - pkg/endpoints/handlers - - pkg/endpoints/handlers/negotiation - - pkg/endpoints/handlers/responsewriters - - pkg/endpoints/metrics - - pkg/endpoints/openapi - - pkg/endpoints/request - - pkg/features - - pkg/registry/generic - - pkg/registry/generic/registry - - pkg/registry/rest - - pkg/server - - pkg/server/filters - - pkg/server/healthz - - pkg/server/httplog - - pkg/server/mux - - pkg/server/options - - pkg/server/routes - - pkg/server/routes/data/swagger - - pkg/server/storage - - pkg/storage - - pkg/storage/errors - - pkg/storage/etcd - - pkg/storage/etcd/metrics - - pkg/storage/etcd/util - - pkg/storage/etcd3 - - pkg/storage/etcd3/preflight - - pkg/storage/names - - pkg/storage/storagebackend - - pkg/storage/storagebackend/factory - - pkg/storage/value - - pkg/util/feature - - pkg/util/flag - - pkg/util/flushwriter - - pkg/util/logs - - pkg/util/trace - - pkg/util/webhook - - pkg/util/wsstream - - plugin/pkg/audit/log - - plugin/pkg/audit/webhook - - 
plugin/pkg/authenticator/token/webhook - - plugin/pkg/authorizer/webhook -- name: k8s.io/client-go - version: 9389c055a838d4f208b699b3c7c51b70f2368861 - subpackages: - - discovery - - discovery/fake - - informers - - informers/admissionregistration - - informers/admissionregistration/v1alpha1 - - informers/admissionregistration/v1beta1 - - informers/apps - - informers/apps/v1 - - informers/apps/v1beta1 - - informers/apps/v1beta2 - - informers/autoscaling - - informers/autoscaling/v1 - - informers/autoscaling/v2beta1 - - informers/batch - - informers/batch/v1 - - informers/batch/v1beta1 - - informers/batch/v2alpha1 - - informers/certificates - - informers/certificates/v1beta1 - - informers/core - - informers/core/v1 - - informers/events - - informers/events/v1beta1 - - informers/extensions - - informers/extensions/v1beta1 - - informers/internalinterfaces - - informers/networking - - informers/networking/v1 - - informers/policy - - informers/policy/v1beta1 - - informers/rbac - - informers/rbac/v1 - - informers/rbac/v1alpha1 - - informers/rbac/v1beta1 - - informers/scheduling - - informers/scheduling/v1alpha1 - - informers/settings - - informers/settings/v1alpha1 - - informers/storage - - informers/storage/v1 - - informers/storage/v1alpha1 - - informers/storage/v1beta1 - - kubernetes - - kubernetes/fake - - kubernetes/scheme - - kubernetes/typed/admissionregistration/v1alpha1 - - kubernetes/typed/admissionregistration/v1alpha1/fake - - kubernetes/typed/admissionregistration/v1beta1 - - kubernetes/typed/admissionregistration/v1beta1/fake - - kubernetes/typed/apps/v1 - - kubernetes/typed/apps/v1/fake - - kubernetes/typed/apps/v1beta1 - - kubernetes/typed/apps/v1beta1/fake - - kubernetes/typed/apps/v1beta2 - - kubernetes/typed/apps/v1beta2/fake - - kubernetes/typed/authentication/v1 - - kubernetes/typed/authentication/v1/fake - - kubernetes/typed/authentication/v1beta1 - - kubernetes/typed/authentication/v1beta1/fake - - kubernetes/typed/authorization/v1 - - 
kubernetes/typed/authorization/v1/fake - - kubernetes/typed/authorization/v1beta1 - - kubernetes/typed/authorization/v1beta1/fake - - kubernetes/typed/autoscaling/v1 - - kubernetes/typed/autoscaling/v1/fake - - kubernetes/typed/autoscaling/v2beta1 - - kubernetes/typed/autoscaling/v2beta1/fake - - kubernetes/typed/batch/v1 - - kubernetes/typed/batch/v1/fake - - kubernetes/typed/batch/v1beta1 - - kubernetes/typed/batch/v1beta1/fake - - kubernetes/typed/batch/v2alpha1 - - kubernetes/typed/batch/v2alpha1/fake - - kubernetes/typed/certificates/v1beta1 - - kubernetes/typed/certificates/v1beta1/fake - - kubernetes/typed/core/v1 - - kubernetes/typed/core/v1/fake - - kubernetes/typed/events/v1beta1 - - kubernetes/typed/events/v1beta1/fake - - kubernetes/typed/extensions/v1beta1 - - kubernetes/typed/extensions/v1beta1/fake - - kubernetes/typed/networking/v1 - - kubernetes/typed/networking/v1/fake - - kubernetes/typed/policy/v1beta1 - - kubernetes/typed/policy/v1beta1/fake - - kubernetes/typed/rbac/v1 - - kubernetes/typed/rbac/v1/fake - - kubernetes/typed/rbac/v1alpha1 - - kubernetes/typed/rbac/v1alpha1/fake - - kubernetes/typed/rbac/v1beta1 - - kubernetes/typed/rbac/v1beta1/fake - - kubernetes/typed/scheduling/v1alpha1 - - kubernetes/typed/scheduling/v1alpha1/fake - - kubernetes/typed/settings/v1alpha1 - - kubernetes/typed/settings/v1alpha1/fake - - kubernetes/typed/storage/v1 - - kubernetes/typed/storage/v1/fake - - kubernetes/typed/storage/v1alpha1 - - kubernetes/typed/storage/v1alpha1/fake - - kubernetes/typed/storage/v1beta1 - - kubernetes/typed/storage/v1beta1/fake - - listers/admissionregistration/v1alpha1 - - listers/admissionregistration/v1beta1 - - listers/apps/v1 - - listers/apps/v1beta1 - - listers/apps/v1beta2 - - listers/autoscaling/v1 - - listers/autoscaling/v2beta1 - - listers/batch/v1 - - listers/batch/v1beta1 - - listers/batch/v2alpha1 - - listers/certificates/v1beta1 - - listers/core/v1 - - listers/events/v1beta1 - - listers/extensions/v1beta1 - - 
listers/networking/v1 - - listers/policy/v1beta1 - - listers/rbac/v1 - - listers/rbac/v1alpha1 - - listers/rbac/v1beta1 - - listers/scheduling/v1alpha1 - - listers/settings/v1alpha1 - - listers/storage/v1 - - listers/storage/v1alpha1 - - listers/storage/v1beta1 - - pkg/version - - plugin/pkg/client/auth - - plugin/pkg/client/auth/azure - - plugin/pkg/client/auth/gcp - - plugin/pkg/client/auth/oidc - - plugin/pkg/client/auth/openstack - - rest - - rest/fake - - rest/watch - - testing - - third_party/forked/golang/template - - tools/auth - - tools/cache - - tools/clientcmd - - tools/clientcmd/api - - tools/clientcmd/api/latest - - tools/clientcmd/api/v1 - - tools/leaderelection - - tools/leaderelection/resourcelock - - tools/metrics - - tools/pager - - tools/record - - tools/reference - - transport - - util/buffer - - util/cert - - util/flowcontrol - - util/homedir - - util/integer - - util/jsonpath - - util/workqueue -- name: k8s.io/code-generator - version: 91d3f6a57905178524105a085085901bb73bd3dc -- name: k8s.io/gengo - version: b58fc7edb82e0c6ffc9b8aef61813c7261b785d4 - subpackages: - - args -- name: k8s.io/kube-openapi - version: 39a7bf85c140f972372c2a0d1ee40adbf0c8bfe1 - subpackages: - - pkg/builder - - pkg/common - - pkg/handler - - pkg/util - - pkg/util/proto -testImports: -- name: github.com/cockroachdb/cmux - version: 112f0506e7743d64a6eb8fedbcff13d9979bbf92 -- name: github.com/coreos/bbolt - version: 32c383e75ce054674c53b5a07e55de85332aee14 -- name: github.com/coreos/pkg - version: fa29b1d70f0beaddd4c7021607cc3c3be8ce94b8 - subpackages: - - capnslog - - health - - httputil - - timeutil -- name: github.com/jonboulle/clockwork - version: 72f9bd7c4e0c2a40055ab3d0f09654f730cce982 -- name: github.com/xiang90/probing - version: 07dd2e8dfe18522e9c447ba95f2fe95262f63bb2 diff --git a/glide.yaml b/glide.yaml deleted file mode 100644 index dd567edfda3..00000000000 --- a/glide.yaml +++ /dev/null @@ -1,110 +0,0 @@ -package: 
github.com/kubernetes-incubator/service-catalog -import: -- package: github.com/emicklei/go-restful - version: ff4f55a206334ef123e4f79bbf348980da81ca46 - subpackages: - - swagger -# This is a dependency of github.com/emicklei/go-restful -- package: google.golang.org/appengine - version: 24e4144ec923c2374f6b06610c0df16a9222c3d9 -- package: github.com/PuerkitoBio/purell - version: 8a290539e2e8629dbc4e6bad948158f790ec31f4 -- package: github.com/spf13/pflag - version: 9ff6c6923cfffbcd502984b8e0c80539a94968b7 -- package: github.com/golang/glog - # We want Nov 4, 2014 version as the Jul 24, 2015 version (latest version) - # introduces bug documented in issue 1187 - version: 44145f04b68cf362d9c4df2182967c2275eaefed -- package: github.com/davecgh/go-spew - version: 782f4967f2dc4564575ca782fe2d04090b5faca8 - subpackages: - - spew -- package: github.com/PuerkitoBio/urlesc - version: 5bd2802263f21d8788851d5305584c82a5c75d7e -- package: github.com/go-openapi/jsonreference - version: 13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272 -- package: github.com/go-openapi/jsonpointer - version: 46af16f9f7b149af66e5d1bd010e3574dc06de98 -- package: github.com/go-openapi/spec - version: 7abd5745472fff5eb3685386d5fb8bf38683154d # in sync with k8s.io/apimachinery -- package: github.com/go-openapi/swag - version: f3f9494671f93fcff853e3c6e9e948b3eb71e590 # in sync with k8s.io/apimachinery -- package: github.com/mailru/easyjson - version: 2f5df55504ebc322e4d52d34df6a1f5b503bf26d # in sync with k8s.io/apimachinery - subpackages: - - jlexer -- package: golang.org/x/text - version: b19bf474d317b857955b12035d2c5acb57ce8b01 - subpackages: - - secure/precis -- package: github.com/gogo/protobuf - version: c0656edd0d9eab7c66d1eb0c568f9039345796f7 - subpackages: - - proto -- package: golang.org/x/net - version: 1c05540f6879653db88113bc4a2b70aec4bd491f - subpackages: - - http2 -- package: 
github.com/golang/protobuf - version: 1643683e1b54a9e88ad26d98f81400c8c9d9f4f9 # in sync with k8s.io/apimachinery - subpackages: - - proto -- package: gopkg.in/natefinch/lumberjack.v2 - version: 20b71e5b60d756d3d2f80def009790325acc2b23 -- package: github.com/pkg/errors - version: a22138067af1c4942683050411a841ade67fe1eb -- package: github.com/howeyc/gopass - version: bf9dde6d0d2c004a008c27aaee91170c786f6db8 -- package: github.com/ghodss/yaml - version: 73d445a93680fa1a78ae23a5839bad48f32ba1ee -- package: github.com/gorilla/mux - version: 8096f47503459bcc74d1f4c487b7e6e42e5746b5 -- package: github.com/satori/go.uuid - version: 5bf94b69c6b68ee1b541973bb8e1144db23a194b -- package: github.com/jteeuwen/go-bindata - version: a0ff2567cfb70903282db057e799fd826784d41d -- package: github.com/spf13/cobra - version: f62e98d28ab7ad31d707ba837a966378465c7b57 -- package: k8s.io/gengo - version: b58fc7edb82e0c6ffc9b8aef61813c7261b785d4 # in sync with https://github.com/kubernetes/code-generator/blob/kubernetes-1.9.1/Godeps/Godeps.json - subpackages: - - args -- package: github.com/kubernetes/repo-infra - version: 2d2eb5e12b4663fc4d764b5db9daab39334d3f37 # master, latest (Dec 18, 2017) -- package: k8s.io/client-go - version: kubernetes-1.9.1 # tag/kubernetes-1.9.1 -- package: k8s.io/apimachinery - version: kubernetes-1.9.1 # tag/kubernetes-1.9.1 -- package: k8s.io/kube-openapi - version: 39a7bf85c140f972372c2a0d1ee40adbf0c8bfe1 # in sync with https://github.com/kubernetes/kubernetes/blob/v1.9.1/Godeps/Godeps.json -- package: k8s.io/api - version: kubernetes-1.9.1 # tag/kubernetes-1.9.1 -- package: k8s.io/apiserver - version: kubernetes-1.9.1 # tag/kubernetes-1.9.1 -- package: k8s.io/code-generator - version: kubernetes-1.9.1 # tag/kubernetes-1.9.1 -- package: github.com/pivotal-cf/brokerapi -- package: code.cloudfoundry.org/lager - version: dfcbcba2dd4a5228c43b0292d219d5c010daed3a -- 
package: github.com/gorilla/context - version: 215affda49addc4c8ef7e2534915df2c8c35c6cd -- package: github.com/pmorie/go-open-service-broker-client - version: 31d8027f493f8f23f850415d171c7c52a972a6f2 -- package: github.com/coreos/etcd - # in sync with Kubernetes master (k8s 1.10 future release), but different from Kubernetes etcd version (3.1.10), - # but seems to have some improvements in memory consumption for embedded mode used in integration tests? - # (with v3.1.10 etcd pod crashes in Travis CI while we run tests) - # see https://github.com/kubernetes/kubernetes/blob/v1.9.1/Godeps/Godeps.json - version: 3.2.13 -- package: google.golang.org/grpc - version: v1.7.5 # compatible with etcd 3.2.13 -- package: github.com/grpc-ecosystem/grpc-gateway - version: v1.3.0 # compatible with etcd 3.2.13 -- package: github.com/grpc-ecosystem/go-grpc-prometheus - version: v1.1 # compatible with etcd 3.2.13 -- package: github.com/olekukonko/tablewriter - version: 96aac992fc8b1a4c83841a6c3e7178d20d989625 -- package: github.com/hashicorp/go-multierror - version: b7773ae218740a7be65057fc60b366a49b538a44 -- package: github.com/spf13/viper - version: ~1.0.0 diff --git a/test/test-dep.sh b/test/test-dep.sh new file mode 100755 index 00000000000..8ff52936685 --- /dev/null +++ b/test/test-dep.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +result=0 + +function cleanup() { + popd + rm -r contrib/examples/consumer/vendor + + if [[ "${result:-}" != "0" ]]; then + echo "A downstream consumer of our client library cannot use dep to vendor Service Catalog. You may need to add a constraint to Gopkg.toml to address." + exit ${result} + fi +} + +pushd contrib/examples/consumer +trap "cleanup" EXIT + +dep ensure +go build . + +echo "Verified that our Gopkg.toml is sufficient for a downstream consumer of our client library." diff --git a/vendor/bitbucket.org/ww/goautoneg/.hg_archival.txt b/vendor/bitbucket.org/ww/goautoneg/.hg_archival.txt deleted file mode 100644 index b9a2ff98457..00000000000 --- a/vendor/bitbucket.org/ww/goautoneg/.hg_archival.txt +++ /dev/null @@ -1,6 +0,0 @@ -repo: 848b351341922ce39becda978778724d5b58dbca -node: 75cd24fc2f2c2a2088577d12123ddee5f54e0675 -branch: default -latesttag: null -latesttagdistance: 5 -changessincelatesttag: 5 diff --git a/vendor/code.cloudfoundry.org/lager/chug/match_log_entry_test.go b/vendor/code.cloudfoundry.org/lager/chug/match_log_entry_test.go index 03d6c77b308..e1c5e568301 100644 --- a/vendor/code.cloudfoundry.org/lager/chug/match_log_entry_test.go +++ b/vendor/code.cloudfoundry.org/lager/chug/match_log_entry_test.go @@ -23,11 +23,11 @@ func (m *logEntryMatcher) Match(actual interface{}) (success bool, err error) { return false, fmt.Errorf("MatchLogEntry must be passed a chug.LogEntry. 
Got:\n%s", format.Object(actual, 1)) } - return m.entry.LogLevel == actualEntry.LogLevel && + return reflect.DeepEqual(m.entry.Error, actualEntry.Error) && + m.entry.LogLevel == actualEntry.LogLevel && m.entry.Source == actualEntry.Source && m.entry.Message == actualEntry.Message && m.entry.Session == actualEntry.Session && - reflect.DeepEqual(m.entry.Error, actualEntry.Error) && m.entry.Trace == actualEntry.Trace && reflect.DeepEqual(m.entry.Data, actualEntry.Data), nil } diff --git a/vendor/code.cloudfoundry.org/lager/lagerflags/README.md b/vendor/code.cloudfoundry.org/lager/lagerflags/README.md index cc081ba2ece..2b2326961d4 100644 --- a/vendor/code.cloudfoundry.org/lager/lagerflags/README.md +++ b/vendor/code.cloudfoundry.org/lager/lagerflags/README.md @@ -17,8 +17,8 @@ import ( "flag" "fmt" - "github.com/cloudfoundry/lager/lagerflags" - "github.com/cloudfoundry/lager" + "code.cloudfoundry.org/lager/lagerflags" + "code.cloudfoundry.org/lager" ) func main() { diff --git a/vendor/code.cloudfoundry.org/lager/lagertest/test_sink.go b/vendor/code.cloudfoundry.org/lager/lagertest/test_sink.go index 651ce7c653a..a722e3d95c2 100644 --- a/vendor/code.cloudfoundry.org/lager/lagertest/test_sink.go +++ b/vendor/code.cloudfoundry.org/lager/lagertest/test_sink.go @@ -5,6 +5,7 @@ import ( "context" "encoding/json" "io" + "sync" "github.com/onsi/ginkgo" "github.com/onsi/gomega/gbytes" @@ -19,8 +20,10 @@ type TestLogger struct { } type TestSink struct { + writeLock *sync.Mutex lager.Sink buffer *gbytes.Buffer + Errors []error } func NewTestLogger(component string) *TestLogger { @@ -41,8 +44,9 @@ func NewTestSink() *TestSink { buffer := gbytes.NewBuffer() return &TestSink{ - Sink: lager.NewWriterSink(buffer, lager.DEBUG), - buffer: buffer, + writeLock: new(sync.Mutex), + Sink: lager.NewWriterSink(buffer, lager.DEBUG), + buffer: buffer, } } @@ -75,3 +79,13 @@ func (s *TestSink) LogMessages() []string { } return messages } + +func (s *TestSink) 
Log(log lager.LogFormat) { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + if log.Error != nil { + s.Errors = append(s.Errors, log.Error) + } + s.Sink.Log(log) +} diff --git a/vendor/code.cloudfoundry.org/lager/logger.go b/vendor/code.cloudfoundry.org/lager/logger.go index 70727655a65..13467328a42 100644 --- a/vendor/code.cloudfoundry.org/lager/logger.go +++ b/vendor/code.cloudfoundry.org/lager/logger.go @@ -117,6 +117,7 @@ func (l *logger) Error(action string, err error, data ...Data) { Message: fmt.Sprintf("%s.%s", l.task, action), LogLevel: ERROR, Data: logData, + Error: err, } for _, sink := range l.sinks { @@ -143,6 +144,7 @@ func (l *logger) Fatal(action string, err error, data ...Data) { Message: fmt.Sprintf("%s.%s", l.task, action), LogLevel: FATAL, Data: logData, + Error: err, } for _, sink := range l.sinks { diff --git a/vendor/code.cloudfoundry.org/lager/logger_test.go b/vendor/code.cloudfoundry.org/lager/logger_test.go index 1d7e173addc..ea41ac96f18 100644 --- a/vendor/code.cloudfoundry.org/lager/logger_test.go +++ b/vendor/code.cloudfoundry.org/lager/logger_test.go @@ -251,6 +251,10 @@ var _ = Describe("Logger", func() { It("data contains error message", func() { Expect(testSink.Logs()[0].Data["error"]).To(Equal(err.Error())) }) + + It("retains the original error values", func() { + Expect(testSink.Errors).To(Equal([]error{err})) + }) }) Context("with no log data", func() { @@ -263,6 +267,10 @@ var _ = Describe("Logger", func() { It("data contains error message", func() { Expect(testSink.Logs()[0].Data["error"]).To(Equal(err.Error())) }) + + It("retains the original error values", func() { + Expect(testSink.Errors).To(Equal([]error{err})) + }) }) Context("with no error", func() { @@ -305,6 +313,10 @@ var _ = Describe("Logger", func() { It("panics with the provided error", func() { Expect(fatalErr).To(Equal(err)) }) + + It("retains the original error values", func() { + Expect(testSink.Errors).To(Equal([]error{err})) + }) }) Context("with no log 
data", func() { @@ -329,6 +341,10 @@ var _ = Describe("Logger", func() { It("panics with the provided error", func() { Expect(fatalErr).To(Equal(err)) }) + + It("retains the original error values", func() { + Expect(testSink.Errors).To(Equal([]error{err})) + }) }) Context("with no error", func() { @@ -354,5 +370,6 @@ var _ = Describe("Logger", func() { Expect(fatalErr).To(BeNil()) }) }) + }) }) diff --git a/vendor/code.cloudfoundry.org/lager/models.go b/vendor/code.cloudfoundry.org/lager/models.go index 1f537c70227..03a81040160 100644 --- a/vendor/code.cloudfoundry.org/lager/models.go +++ b/vendor/code.cloudfoundry.org/lager/models.go @@ -22,6 +22,7 @@ type LogFormat struct { Message string `json:"message"` LogLevel LogLevel `json:"log_level"` Data Data `json:"data"` + Error error `json:"-"` } func (log LogFormat) ToJSON() []byte { diff --git a/vendor/code.cloudfoundry.org/lager/redacting_writer_sink_test.go b/vendor/code.cloudfoundry.org/lager/redacting_writer_sink_test.go index 3b920064261..677d5d463ee 100644 --- a/vendor/code.cloudfoundry.org/lager/redacting_writer_sink_test.go +++ b/vendor/code.cloudfoundry.org/lager/redacting_writer_sink_test.go @@ -48,17 +48,6 @@ var _ = Describe("RedactingWriterSink", func() { Expect(message["data"].(map[string]interface{})["lager serialisation error"]).To(Equal("json: unsupported type: func()")) Expect(message["data"].(map[string]interface{})["data_dump"]).ToNot(BeEmpty()) }) - - Measure("should be efficient", func(b Benchmarker) { - runtime := b.Time("runtime", func() { - for i := 0; i < 5000; i++ { - sink.Log(lager.LogFormat{LogLevel: lager.INFO, Message: "hello world", Data: map[string]interface{}{"some_key": func() {}}}) - Expect(writer.Copy()).ToNot(BeEmpty()) - } - }) - - Expect(runtime.Seconds()).To(BeNumerically("<", 1), "logging shouldn't take too long.") - }, 1) }) Context("when logging below the minimum log level", func() { diff --git a/vendor/code.cloudfoundry.org/lager/writer_sink_test.go 
b/vendor/code.cloudfoundry.org/lager/writer_sink_test.go index 748a30a0ce5..86a6571bfad 100644 --- a/vendor/code.cloudfoundry.org/lager/writer_sink_test.go +++ b/vendor/code.cloudfoundry.org/lager/writer_sink_test.go @@ -51,17 +51,6 @@ var _ = Describe("WriterSink", func() { Expect(message["data"].(map[string]interface{})["lager serialisation error"]).To(Equal("json: unsupported type: func()")) Expect(message["data"].(map[string]interface{})["data_dump"]).ToNot(BeEmpty()) }) - - Measure("should be efficient", func(b Benchmarker) { - runtime := b.Time("runtime", func() { - for i := 0; i < 5000; i++ { - sink.Log(lager.LogFormat{LogLevel: lager.INFO, Message: "hello world", Data: map[string]interface{}{"some_key": func() {}}}) - Expect(writer.Copy()).ToNot(BeEmpty()) - } - }) - - Expect(runtime.Seconds()).To(BeNumerically("<", 1), "logging shouldn't take too long.") - }, 1) }) Context("when logging below the minimum log level", func() { diff --git a/vendor/github.com/bazelbuild/buildtools/.gitignore b/vendor/github.com/bazelbuild/buildtools/.gitignore new file mode 100644 index 00000000000..446db8115db --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/.gitignore @@ -0,0 +1,4 @@ +/bazel-* +*~ +.idea +*.iml diff --git a/vendor/github.com/bazelbuild/buildtools/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/BUILD.bazel new file mode 100644 index 00000000000..652f6890c1b --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/BUILD.bazel @@ -0,0 +1,24 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_prefix") + +go_prefix("github.com/bazelbuild/buildtools") + +config_setting( + name = "windows", + values = {"cpu": "x64_windows"}, +) + +test_suite( + name = "tests", + tests = [ + "//api_proto:api.gen.pb.go_checkshtest", + "//build:go_default_test", + "//build:parse.y.go_checkshtest", + "//build_proto:build.gen.pb.go_checkshtest", + "//deps_proto:deps.gen.pb.go_checkshtest", + "//edit:go_default_test", 
+ "//extra_actions_base_proto:extra_actions_base.gen.pb.go_checkshtest", + "//lang:tables.gen.go_checkshtest", + "//tables:go_default_test", + "//wspace:go_default_test", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/CONTRIBUTING.md b/vendor/github.com/bazelbuild/buildtools/CONTRIBUTING.md new file mode 100644 index 00000000000..2827b7d3fa2 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/CONTRIBUTING.md @@ -0,0 +1,27 @@ +Want to contribute? Great! First, read this page (including the small print at the end). + +### Before you contribute +Before we can use your code, you must sign the +[Google Individual Contributor License Agreement] +(https://cla.developers.google.com/about/google-individual) +(CLA), which you can do online. The CLA is necessary mainly because you own the +copyright to your changes, even after your contribution becomes part of our +codebase, so we need your permission to use and distribute your code. We also +need to be sure of various other things—for instance that you'll tell us if you +know that your code infringes on other people's patents. You don't have to sign +the CLA until after you've submitted your code for review and a member has +approved it, but you must do it before we can put your code into our codebase. +Before you start working on a larger contribution, you should get in touch with +us first through the issue tracker with your idea so that we can help out and +possibly guide you. Coordinating up front makes it much easier to avoid +frustration later on. + +### Code reviews +All submissions, including submissions by project members, require review. We +use Github pull requests for this purpose. + +### The small print +Contributions made by corporations are covered by a different agreement than +the one above, the +[Software Grant and Corporate Contributor License Agreement] +(https://cla.developers.google.com/about/google-corporate). 
diff --git a/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS b/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS new file mode 100644 index 00000000000..2d919a47f9b --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS @@ -0,0 +1,15 @@ +# People who have agreed to one of the CLAs and can contribute patches. +# The AUTHORS file lists the copyright holders; this file +# lists people. For example, Google employees are listed here +# but not in AUTHORS, because Google holds the copyright. +# +# https://developers.google.com/open-source/cla/individual +# https://developers.google.com/open-source/cla/corporate +# +# Names should be added to this file as: +# Name +Paul Bethe +Russ Cox +Laurent Le Brun +Justine Alexandra Roberts Tunney +Nilton Volpato \ No newline at end of file diff --git a/vendor/github.com/bazelbuild/buildtools/LICENSE b/vendor/github.com/bazelbuild/buildtools/LICENSE new file mode 100644 index 00000000000..0adcb5d6f52 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/LICENSE @@ -0,0 +1,13 @@ +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/vendor/github.com/bazelbuild/buildtools/README.md b/vendor/github.com/bazelbuild/buildtools/README.md new file mode 100644 index 00000000000..1a44213a9c7 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/README.md @@ -0,0 +1,15 @@ +# Buildtools for bazel + +This repository contains developer tools for working with Google's `bazel` buildtool. + +* [buildifier](buildifier/README.md) For formatting BUILD, BUILD.bazel and BUCK files in a standard way +* [buildozer](buildozer/README.md) For doing command-line operations on these files. +* [unused_deps](unused_deps/README.md) For finding unneeded dependencies in +[java_library](https://docs.bazel.build/versions/master/be/java.html#java_library) rules. + + +[![Build Status](https://ci.bazel.io/buildStatus/icon?job=buildtools)](https://ci.bazel.io/job/buildtools) + +## Setup + +See instructions in each tool's directory. diff --git a/vendor/github.com/bazelbuild/buildtools/WORKSPACE b/vendor/github.com/bazelbuild/buildtools/WORKSPACE new file mode 100644 index 00000000000..ccbfcd9604d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/WORKSPACE @@ -0,0 +1,34 @@ +workspace(name = "com_github_bazelbuild_buildtools") + +# 0.5.5 +http_archive( + name = "io_bazel_rules_go", + sha256 = "51bc38fd56e61ffd83f455e3083858ca3cc307d76fa1387b2ae2baae379afcc2", + strip_prefix = "rules_go-71cdb6fd5f887d215bdbe0e4d1eb137278b09c39", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/rules_go/archive/71cdb6fd5f887d215bdbe0e4d1eb137278b09c39.tar.gz", + "https://github.com/bazelbuild/rules_go/archive/71cdb6fd5f887d215bdbe0e4d1eb137278b09c39.tar.gz", + ], +) + +load( + "@io_bazel_rules_go//go:def.bzl", + "go_rules_dependencies", + "go_register_toolchains", + "go_repository", +) + +go_rules_dependencies() + +go_register_toolchains() + +# used for build.proto +http_archive( + name = "io_bazel", + sha256 = 
"255e1199c0876b9a8cc02d5ea569b6cfe1901d30428355817b7606ddecb04c15", + strip_prefix = "bazel-0.8.0", + urls = [ + "http://mirror.bazel.build/github.com/bazelbuild/bazel/archive/0.8.0.tar.gz", + "https://github.com/bazelbuild/bazel/archive/0.8.0.tar.gz", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/api_proto/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/api_proto/BUILD.bazel new file mode 100644 index 00000000000..ed36ace0203 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/api_proto/BUILD.bazel @@ -0,0 +1,13 @@ +load("@io_bazel_rules_go//proto:go_proto_library.bzl", "go_proto_library") +load("//build:build_defs.bzl", "genfile_check_test") + +genfile_check_test( + src = "api.gen.pb.go", + gen = "api.pb.go", +) + +go_proto_library( + name = "go_default_library", + srcs = ["api.proto"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/api_proto/api.gen.pb.go b/vendor/github.com/bazelbuild/buildtools/api_proto/api.gen.pb.go new file mode 100755 index 00000000000..d060daca5a7 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/api_proto/api.gen.pb.go @@ -0,0 +1,315 @@ +// Code generated by protoc-gen-go. +// source: api_proto/api.proto +// DO NOT EDIT! + +/* +Package devtools_buildozer is a generated protocol buffer package. + +It is generated from these files: + api_proto/api.proto + +It has these top-level messages: + Output + RepeatedString +*/ +package devtools_buildozer + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Output_Record_Field_ERROR int32 + +const ( + Output_Record_Field_UNKNOWN Output_Record_Field_ERROR = 0 + Output_Record_Field_MISSING Output_Record_Field_ERROR = 1 +) + +var Output_Record_Field_ERROR_name = map[int32]string{ + 0: "UNKNOWN", + 1: "MISSING", +} +var Output_Record_Field_ERROR_value = map[string]int32{ + "UNKNOWN": 0, + "MISSING": 1, +} + +func (x Output_Record_Field_ERROR) String() string { + return proto.EnumName(Output_Record_Field_ERROR_name, int32(x)) +} +func (Output_Record_Field_ERROR) EnumDescriptor() ([]byte, []int) { + return fileDescriptor0, []int{0, 0, 0, 0} +} + +type Output struct { + Records []*Output_Record `protobuf:"bytes,1,rep,name=records" json:"records,omitempty"` +} + +func (m *Output) Reset() { *m = Output{} } +func (m *Output) String() string { return proto.CompactTextString(m) } +func (*Output) ProtoMessage() {} +func (*Output) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *Output) GetRecords() []*Output_Record { + if m != nil { + return m.Records + } + return nil +} + +type Output_Record struct { + Fields []*Output_Record_Field `protobuf:"bytes,1,rep,name=fields" json:"fields,omitempty"` +} + +func (m *Output_Record) Reset() { *m = Output_Record{} } +func (m *Output_Record) String() string { return proto.CompactTextString(m) } +func (*Output_Record) ProtoMessage() {} +func (*Output_Record) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } + +func (m *Output_Record) GetFields() []*Output_Record_Field { + if m != nil { + return m.Fields + } + return nil +} + +type Output_Record_Field struct { + // Types that are valid to be assigned to Value: + // *Output_Record_Field_Text + // *Output_Record_Field_Number + // *Output_Record_Field_Error + // *Output_Record_Field_List + Value 
isOutput_Record_Field_Value `protobuf_oneof:"value"` + // Used internally by Buildozer to decide whether a field should be quoted + // when printing. This does not affect the contents of 'value'. + QuoteWhenPrinting bool `protobuf:"varint,7,opt,name=quote_when_printing,json=quoteWhenPrinting" json:"quote_when_printing,omitempty"` +} + +func (m *Output_Record_Field) Reset() { *m = Output_Record_Field{} } +func (m *Output_Record_Field) String() string { return proto.CompactTextString(m) } +func (*Output_Record_Field) ProtoMessage() {} +func (*Output_Record_Field) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0, 0} } + +type isOutput_Record_Field_Value interface { + isOutput_Record_Field_Value() +} + +type Output_Record_Field_Text struct { + Text string `protobuf:"bytes,1,opt,name=text,oneof"` +} +type Output_Record_Field_Number struct { + Number int32 `protobuf:"varint,2,opt,name=number,oneof"` +} +type Output_Record_Field_Error struct { + Error Output_Record_Field_ERROR `protobuf:"varint,3,opt,name=error,enum=devtools.buildozer.Output_Record_Field_ERROR,oneof"` +} +type Output_Record_Field_List struct { + List *RepeatedString `protobuf:"bytes,5,opt,name=list,oneof"` +} + +func (*Output_Record_Field_Text) isOutput_Record_Field_Value() {} +func (*Output_Record_Field_Number) isOutput_Record_Field_Value() {} +func (*Output_Record_Field_Error) isOutput_Record_Field_Value() {} +func (*Output_Record_Field_List) isOutput_Record_Field_Value() {} + +func (m *Output_Record_Field) GetValue() isOutput_Record_Field_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *Output_Record_Field) GetText() string { + if x, ok := m.GetValue().(*Output_Record_Field_Text); ok { + return x.Text + } + return "" +} + +func (m *Output_Record_Field) GetNumber() int32 { + if x, ok := m.GetValue().(*Output_Record_Field_Number); ok { + return x.Number + } + return 0 +} + +func (m *Output_Record_Field) GetError() Output_Record_Field_ERROR { + if x, ok := 
m.GetValue().(*Output_Record_Field_Error); ok { + return x.Error + } + return Output_Record_Field_UNKNOWN +} + +func (m *Output_Record_Field) GetList() *RepeatedString { + if x, ok := m.GetValue().(*Output_Record_Field_List); ok { + return x.List + } + return nil +} + +func (m *Output_Record_Field) GetQuoteWhenPrinting() bool { + if m != nil { + return m.QuoteWhenPrinting + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Output_Record_Field) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Output_Record_Field_OneofMarshaler, _Output_Record_Field_OneofUnmarshaler, _Output_Record_Field_OneofSizer, []interface{}{ + (*Output_Record_Field_Text)(nil), + (*Output_Record_Field_Number)(nil), + (*Output_Record_Field_Error)(nil), + (*Output_Record_Field_List)(nil), + } +} + +func _Output_Record_Field_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Output_Record_Field) + // value + switch x := m.Value.(type) { + case *Output_Record_Field_Text: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Text) + case *Output_Record_Field_Number: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Number)) + case *Output_Record_Field_Error: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Error)) + case *Output_Record_Field_List: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.List); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Output_Record_Field.Value has unexpected type %T", x) + } + return nil +} + +func _Output_Record_Field_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Output_Record_Field) + switch tag { + case 1: // value.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := 
b.DecodeStringBytes() + m.Value = &Output_Record_Field_Text{x} + return true, err + case 2: // value.number + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &Output_Record_Field_Number{int32(x)} + return true, err + case 3: // value.error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &Output_Record_Field_Error{Output_Record_Field_ERROR(x)} + return true, err + case 5: // value.list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RepeatedString) + err := b.DecodeMessage(msg) + m.Value = &Output_Record_Field_List{msg} + return true, err + default: + return false, nil + } +} + +func _Output_Record_Field_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Output_Record_Field) + // value + switch x := m.Value.(type) { + case *Output_Record_Field_Text: + n += proto.SizeVarint(1<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Text))) + n += len(x.Text) + case *Output_Record_Field_Number: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Number)) + case *Output_Record_Field_Error: + n += proto.SizeVarint(3<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Error)) + case *Output_Record_Field_List: + s := proto.Size(x.List) + n += proto.SizeVarint(5<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type RepeatedString struct { + Strings []string `protobuf:"bytes,1,rep,name=strings" json:"strings,omitempty"` +} + +func (m *RepeatedString) Reset() { *m = RepeatedString{} } +func (m *RepeatedString) String() string { return proto.CompactTextString(m) } +func (*RepeatedString) ProtoMessage() {} +func (*RepeatedString) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *RepeatedString) GetStrings() 
[]string { + if m != nil { + return m.Strings + } + return nil +} + +func init() { + proto.RegisterType((*Output)(nil), "devtools.buildozer.Output") + proto.RegisterType((*Output_Record)(nil), "devtools.buildozer.Output.Record") + proto.RegisterType((*Output_Record_Field)(nil), "devtools.buildozer.Output.Record.Field") + proto.RegisterType((*RepeatedString)(nil), "devtools.buildozer.RepeatedString") + proto.RegisterEnum("devtools.buildozer.Output_Record_Field_ERROR", Output_Record_Field_ERROR_name, Output_Record_Field_ERROR_value) +} + +func init() { proto.RegisterFile("api_proto/api.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 336 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xc1, 0x4a, 0xf3, 0x40, + 0x14, 0x85, 0x33, 0x6d, 0x93, 0xfc, 0xbd, 0x85, 0xd2, 0x7f, 0xea, 0x62, 0xe8, 0x2a, 0xcd, 0xc6, + 0x20, 0x38, 0x42, 0xdd, 0x08, 0x2e, 0x04, 0xa1, 0xda, 0x22, 0xa6, 0x32, 0x45, 0xba, 0x2c, 0xa9, + 0x19, 0xdb, 0x81, 0x98, 0x89, 0x93, 0x49, 0x15, 0x9f, 0xc3, 0x87, 0xf0, 0xb1, 0x7c, 0x14, 0xc9, + 0x24, 0x15, 0x44, 0x41, 0x77, 0xf7, 0xdc, 0x7b, 0xbe, 0x73, 0x42, 0x06, 0xfa, 0x51, 0x26, 0x96, + 0x99, 0x92, 0x5a, 0x1e, 0x45, 0x99, 0xa0, 0x66, 0xc2, 0x38, 0xe6, 0x5b, 0x2d, 0x65, 0x92, 0xd3, + 0x55, 0x21, 0x92, 0x58, 0xbe, 0x70, 0xe5, 0xbf, 0x35, 0xc1, 0x99, 0x15, 0x3a, 0x2b, 0x34, 0x3e, + 0x05, 0x57, 0xf1, 0x3b, 0xa9, 0xe2, 0x9c, 0x20, 0xaf, 0x19, 0x74, 0x46, 0x43, 0xfa, 0x1d, 0xa0, + 0x95, 0x99, 0x32, 0xe3, 0x64, 0x3b, 0x62, 0xf0, 0xde, 0x00, 0xa7, 0xda, 0xe1, 0x33, 0x70, 0xee, + 0x05, 0x4f, 0x3e, 0x63, 0xf6, 0x7f, 0x8d, 0xa1, 0x17, 0xa5, 0x9f, 0xd5, 0xd8, 0xe0, 0xb5, 0x01, + 0xb6, 0xd9, 0xe0, 0x3d, 0x68, 0x69, 0xfe, 0xac, 0x09, 0xf2, 0x50, 0xd0, 0x9e, 0x58, 0xcc, 0x28, + 0x4c, 0xc0, 0x49, 0x8b, 0x87, 0x15, 0x57, 0xa4, 0xe1, 0xa1, 0xc0, 0x9e, 0x58, 0xac, 0xd6, 0x78, + 0x0c, 0x36, 0x57, 0x4a, 0x2a, 0xd2, 0xf4, 0x50, 0xd0, 0x1d, 0x1d, 0xfe, 0xb1, 0x99, 0x8e, 0x19, + 0x9b, 0xb1, 0x89, 0xc5, 
0x2a, 0x1a, 0x9f, 0x40, 0x2b, 0x11, 0xb9, 0x26, 0xb6, 0x87, 0x82, 0xce, + 0xc8, 0xff, 0x29, 0x85, 0xf1, 0x8c, 0x47, 0x9a, 0xc7, 0x73, 0xad, 0x44, 0xba, 0x2e, 0x3f, 0xad, + 0x24, 0x30, 0x85, 0xfe, 0x63, 0x21, 0x35, 0x5f, 0x3e, 0x6d, 0x78, 0xba, 0xcc, 0x94, 0x48, 0xb5, + 0x48, 0xd7, 0xc4, 0xf5, 0x50, 0xf0, 0x8f, 0xfd, 0x37, 0xa7, 0xc5, 0x86, 0xa7, 0x37, 0xf5, 0xc1, + 0x1f, 0x82, 0x6d, 0xba, 0x71, 0x07, 0xdc, 0xdb, 0xf0, 0x2a, 0x9c, 0x2d, 0xc2, 0x9e, 0x55, 0x8a, + 0xeb, 0xe9, 0x7c, 0x3e, 0x0d, 0x2f, 0x7b, 0xe8, 0xdc, 0x05, 0x7b, 0x1b, 0x25, 0x05, 0xf7, 0x0f, + 0xa0, 0xfb, 0xb5, 0x15, 0x13, 0x70, 0x73, 0x33, 0x55, 0xbf, 0xba, 0xcd, 0x76, 0x72, 0xe5, 0x98, + 0x17, 0x3f, 0xfe, 0x08, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x62, 0x58, 0xc4, 0x08, 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/bazelbuild/buildtools/api_proto/api.proto b/vendor/github.com/bazelbuild/buildtools/api_proto/api.proto new file mode 100644 index 00000000000..a991dccbdcd --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/api_proto/api.proto @@ -0,0 +1,42 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ +syntax = "proto3"; + +package devtools.buildozer; + +message Output { + repeated Record records = 1; + message Record { + repeated Field fields = 1; + message Field { + oneof value { + string text = 1; + int32 number = 2; + ERROR error = 3; + RepeatedString list = 5; + } + // Used internally by Buildozer to decide whether a field should be quoted + // when printing. This does not affect the contents of 'value'. + bool quote_when_printing = 7; + + enum ERROR { + UNKNOWN = 0; + MISSING = 1; + } + } + } +} + +message RepeatedString { + repeated string strings = 1; +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel new file mode 100644 index 00000000000..15d0a458423 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel @@ -0,0 +1,45 @@ +# gazelle:exclude parse.y.go +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") +load(":build_defs.bzl", "genfile_check_test", "go_yacc") + +go_yacc( + src = "parse.y", + out = "parse.y.baz.go", +) + +# parse.y.go is checked in to satisfy the Go community +# https://github.com/bazelbuild/buildtools/issues/14 +# this test ensures it doesn't get stale. 
+genfile_check_test( + src = "parse.y.go", + gen = "parse.y.baz.go", +) + +go_library( + name = "go_default_library", + srcs = [ + "lex.go", + "parse.y.baz.go", # keep + "print.go", + "quote.go", + "rewrite.go", + "rule.go", + "syntax.go", + "walk.go", + ], + visibility = ["//visibility:public"], + deps = ["//tables:go_default_library"], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = [ + "parse_test.go", + "print_test.go", + "quote_test.go", + "rule_test.go", + ], + data = glob(["testdata/*"]), + library = ":go_default_library", +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/build_defs.bzl b/vendor/github.com/bazelbuild/buildtools/build/build_defs.bzl new file mode 100644 index 00000000000..e4b5aacdd47 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/build_defs.bzl @@ -0,0 +1,52 @@ +"""Provides go_yacc and genfile_check_test + +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+""" + +_GO_YACC_TOOL = "@org_golang_x_tools//cmd/goyacc" + +def go_yacc(src, out, visibility=None): + """Runs go tool yacc -o $out $src.""" + native.genrule( + name = src + ".go_yacc", + srcs = [src], + outs = [out], + tools = [_GO_YACC_TOOL], + cmd = ("export GOROOT=$$(dirname $(location " + _GO_YACC_TOOL + "))/..;" + + " $(location " + _GO_YACC_TOOL + ") " + + " -o $(location " + out + ") $(SRCS)"), + visibility = visibility, + local = 1, + ) + +def genfile_check_test(src, gen): + """Asserts that any checked-in generated code matches regen.""" + if not src: + fail("src is required", "src") + if not gen: + fail("gen is required", "gen") + native.genrule( + name = src + "_checksh", + outs = [src + "_check.sh"], + cmd = "echo 'diff $$@' > $@", + ) + native.sh_test( + name = src + "_checkshtest", + size = "small", + srcs = [src + "_check.sh"], + data = [src, gen], + args = ["$(location " + src + ")", "$(location " + gen + ")"], + ) + diff --git a/vendor/github.com/bazelbuild/buildtools/build/lex.go b/vendor/github.com/bazelbuild/buildtools/build/lex.go new file mode 100644 index 00000000000..986f822630b --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/lex.go @@ -0,0 +1,855 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Lexical scanning for BUILD file parser. 
package build

import (
	"bytes"
	"fmt"
	"strings"
	"unicode/utf8"

	"github.com/bazelbuild/buildtools/tables"
)

// Parse parses the input data and returns the corresponding parse tree.
//
// The filename is used only for generating error messages.
func Parse(filename string, data []byte) (*File, error) {
	in := newInput(filename, data)
	return in.parse()
}

// An input represents a single input file being parsed.
type input struct {
	// Lexing state.
	filename  string    // name of input file, for errors
	complete  []byte    // entire input
	remaining []byte    // remaining input
	token     []byte    // token being scanned
	lastToken string    // most recently returned token, for error messages
	pos       Position  // current input position
	comments  []Comment // accumulated comments
	endStmt   int       // position of the end of the current statement
	depth     int       // nesting of [ ] { } ( )
	cleanLine bool      // true if the current line only contains whitespace before the current position
	indent    int       // current line indentation in spaces
	indents   []int     // stack of indentation levels in spaces

	// Parser state.
	file       *File // returned top-level syntax tree
	parseError error // error encountered during parsing

	// Comment assignment state.
	pre  []Expr // all expressions, in preorder traversal
	post []Expr // all expressions, in postorder traversal
}

// newInput returns an input ready to scan data from the beginning.
// The position starts at line 1, rune 1, and the indentation stack
// begins with a single level-0 entry.
func newInput(filename string, data []byte) *input {
	return &input{
		filename:  filename,
		complete:  data,
		remaining: data,
		pos:       Position{Line: 1, LineRune: 1, Byte: 0},
		cleanLine: true,
		indents:   []int{0},
	}
}

// currentIndent returns the width in spaces of the innermost
// (most recently opened) indentation level.
func (in *input) currentIndent() int {
	return in.indents[len(in.indents)-1]
}

// parse parses the input file.
func (in *input) parse() (f *File, err error) {
	// The parser panics for both routine errors like syntax errors
	// and for programmer bugs like array index errors.
	// Turn both into error returns. Catching bug panics is
	// especially important when processing many files.
	defer func() {
		if e := recover(); e != nil {
			if e == in.parseError {
				err = in.parseError
			} else {
				err = fmt.Errorf("%s:%d:%d: internal error: %v", in.filename, in.pos.Line, in.pos.LineRune, e)
			}
		}
	}()

	// Invoke the parser generated from parse.y.
	yyParse(in)
	if in.parseError != nil {
		return nil, in.parseError
	}
	in.file.Path = in.filename

	// Assign comments to nearby syntax.
	in.assignComments()

	return in.file, nil
}

// Error is called to report an error.
// When called by the generated code s is always "syntax error".
// Error does not return: it panics (the panic value is the same
// error recorded in in.parseError, which parse's recover recognizes).
func (in *input) Error(s string) {
	if s == "syntax error" && in.lastToken != "" {
		s += " near " + in.lastToken
	}
	in.parseError = fmt.Errorf("%s:%d:%d: %v", in.filename, in.pos.Line, in.pos.LineRune, s)
	panic(in.parseError)
}

// eof reports whether the input has reached end of file.
func (in *input) eof() bool {
	return len(in.remaining) == 0
}

// peekRune returns the next rune in the input without consuming it.
// It returns 0 at end of file.
func (in *input) peekRune() int {
	if len(in.remaining) == 0 {
		return 0
	}
	r, _ := utf8.DecodeRune(in.remaining)
	return int(r)
}

// readRune consumes and returns the next rune in the input, updating
// the line/column/byte position. Calling readRune at EOF is a lexer
// bug and aborts via Error (which panics).
func (in *input) readRune() int {
	if len(in.remaining) == 0 {
		in.Error("internal lexer error: readRune at EOF")
	}
	r, size := utf8.DecodeRune(in.remaining)
	in.remaining = in.remaining[size:]
	if r == '\n' {
		in.pos.Line++
		in.pos.LineRune = 1
	} else {
		in.pos.LineRune++
	}
	in.pos.Byte += size
	return int(r)
}

// startToken marks the beginning of the next input token.
// It must be followed by a call to endToken, once the token has
// been consumed using readRune.
+func (in *input) startToken(val *yySymType) { + in.token = in.remaining + val.tok = "" + val.pos = in.pos +} + +// yySymType (used in the next few functions) is defined by the +// generated parser. It is a struct containing all the fields listed +// in parse.y's %union [sic] section. + +// endToken marks the end of an input token. +// It records the actual token string in val.tok if the caller +// has not done that already. +func (in *input) endToken(val *yySymType) { + if val.tok == "" { + tok := string(in.token[:len(in.token)-len(in.remaining)]) + val.tok = tok + in.lastToken = val.tok + } +} + +// Lex is called from the generated parser to obtain the next input token. +// It returns the token value (either a rune like '+' or a symbolic token _FOR) +// and sets val to the data associated with the token. +// +// For all our input tokens, the associated data is +// val.Pos (the position where the token begins) +// and val.Token (the input string corresponding to the token). +func (in *input) Lex(val *yySymType) int { + // Skip past spaces, stopping at non-space or EOF. + countNL := 0 // number of newlines we've skipped past + for !in.eof() { + // If a single statement is split into multiple lines, we don't need + // to track indentations and unindentations within these lines. For example: + // + // def f( + // # This indentation should be ignored + // x): + // # This unindentation should be ignored + // # Actual indentation is from 0 to 2 spaces here + // return x + // + // If the --format_bzl flag is set to false, for legacy reason we track the end of each + // code block defined at the top level instead the end of the current statement. + // + // To handle these cases, when we reach the beginning of a statement (or + // top-level code block), we scan forward to see where + // it should end and record the number of input bytes remaining + // at that endpoint. 
When we reach that point in the input, we + // insert an implicit semicolon to force the two expressions + // to stay separate (only if --format_bzl is set to false, for legacy reasons). + // We also set in.endStmt = 0 as a signal that we nee to track indentation levels. + // + if in.endStmt != 0 && len(in.remaining) == in.endStmt { + in.endStmt = 0 + if !tables.FormatBzlFiles { + in.lastToken = "implicit ;" + val.tok = ";" + return ';' + } + } + + // Skip over spaces. Count newlines so we can give the parser + // information about where top-level blank lines are, + // for top-level comment assignment. + c := in.peekRune() + if c == ' ' || c == '\t' || c == '\r' || c == '\n' { + if c == '\n' { + in.indent = 0 + in.cleanLine = true + if in.endStmt == 0 { + // Not in a statememt. Tell parser about top-level blank line. + in.startToken(val) + in.readRune() + in.endToken(val) + return '\n' + } + countNL++ + } else if c == ' ' && in.cleanLine { + in.indent++ + } + in.readRune() + continue + } + + // Comment runs to end of line. + if c == '#' { + // If a line contains just a comment its indentation level doesn't matter. + // Reset it to zero. + in.indent = 0 + in.cleanLine = true + + // Is this comment the only thing on its line? + // Find the last \n before this # and see if it's all + // spaces from there to here. + // If it's a suffix comment but the last non-space symbol before + // it is one of (, [, or {, treat it as a line comment that should be + // put inside the corresponding block. + i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n")) + prefix := bytes.TrimSpace(in.complete[i+1 : in.pos.Byte]) + isSuffix := true + if len(prefix) == 0 || + prefix[len(prefix)-1] == '[' || + prefix[len(prefix)-1] == '(' || + prefix[len(prefix)-1] == '{' { + isSuffix = false + } + + // Consume comment. 
+ in.startToken(val) + for len(in.remaining) > 0 && in.readRune() != '\n' { + } + in.endToken(val) + + val.tok = strings.TrimRight(val.tok, "\n") + in.lastToken = "comment" + + // If we are at top level (not in a rule), hand the comment to + // the parser as a _COMMENT token. The grammar is written + // to handle top-level comments itself. + if in.endStmt == 0 { + // Not in a statement. Tell parser about top-level comment. + return _COMMENT + } + + // Otherwise, save comment for later attachment to syntax tree. + if countNL > 1 { + in.comments = append(in.comments, Comment{val.pos, "", false}) + } + in.comments = append(in.comments, Comment{val.pos, val.tok, isSuffix}) + countNL = 1 + continue + } + + if c == '\\' && len(in.remaining) >= 2 && in.remaining[1] == '\n' { + // We can ignore a trailing \ at end of line. + in.readRune() + continue + } + + // Found non-space non-comment. + break + } + + // Check for changes in indentation + // Skip if --format_bzl is set to false, if we're inside a statement, or if there were non-space + // characters before in the current line. + if tables.FormatBzlFiles && in.endStmt == 0 && in.cleanLine { + if in.indent > in.currentIndent() { + // A new indentation block starts + in.indents = append(in.indents, in.indent) + in.lastToken = "indent" + in.cleanLine = false + return _INDENT + } else if in.indent < in.currentIndent() { + // An indentation block ends + in.indents = in.indents[:len(in.indents)-1] + + // It's a syntax error if the current line indentation level in now greater than + // currentIndent(), should be either equal (a parent block continues) or still less + // (need to unindent more). + if in.indent > in.currentIndent() { + in.pos = val.pos + in.Error("unexpected indentation") + } + in.lastToken = "unindent" + return _UNINDENT + } + } + + in.cleanLine = false + + // If the file ends with an indented block, return the corresponding amounts of unindents. 
+ if in.eof() && in.currentIndent() > 0 { + in.indents = in.indents[:len(in.indents)-1] + in.lastToken = "unindent" + return _UNINDENT + } + + // Found the beginning of the next token. + in.startToken(val) + defer in.endToken(val) + + // End of file. + if in.eof() { + in.lastToken = "EOF" + return _EOF + } + + // If endStmt is 0, we need to recompute where the end + // of the next statement is, so that we can + // generate a virtual end-of-rule semicolon (see above). + if in.endStmt == 0 { + in.endStmt = len(in.skipStmt(in.remaining)) + if in.endStmt == 0 { + // skipStmt got confused. + // No more virtual semicolons. + in.endStmt = -1 + } + } + + // Punctuation tokens. + switch c := in.peekRune(); c { + case '[', '(', '{': + in.depth++ + in.readRune() + return c + + case ']', ')', '}': + in.depth-- + in.readRune() + return c + + case '.', '-', '%', ':', ';', ',', '/', '*': // single-char tokens + in.readRune() + return c + + case '<', '>', '=', '!', '+': // possibly followed by = + in.readRune() + if in.peekRune() == '=' { + in.readRune() + switch c { + case '<': + return _LE + case '>': + return _GE + case '=': + return _EQ + case '!': + return _NE + case '+': + return _ADDEQ + } + } + return c + + case 'r': // possible beginning of raw quoted string + if len(in.remaining) < 2 || in.remaining[1] != '"' && in.remaining[1] != '\'' { + break + } + in.readRune() + c = in.peekRune() + fallthrough + + case '"', '\'': // quoted string + quote := c + if len(in.remaining) >= 3 && in.remaining[0] == byte(quote) && in.remaining[1] == byte(quote) && in.remaining[2] == byte(quote) { + // Triple-quoted string. 
+ in.readRune() + in.readRune() + in.readRune() + var c1, c2, c3 int + for { + if in.eof() { + in.pos = val.pos + in.Error("unexpected EOF in string") + } + c1, c2, c3 = c2, c3, in.readRune() + if c1 == quote && c2 == quote && c3 == quote { + break + } + if c3 == '\\' { + if in.eof() { + in.pos = val.pos + in.Error("unexpected EOF in string") + } + in.readRune() + } + } + } else { + in.readRune() + for { + if in.eof() { + in.pos = val.pos + in.Error("unexpected EOF in string") + } + if in.peekRune() == '\n' { + in.Error("unexpected newline in string") + } + c := in.readRune() + if c == quote { + break + } + if c == '\\' { + if in.eof() { + in.pos = val.pos + in.Error("unexpected EOF in string") + } + in.readRune() + } + } + } + in.endToken(val) + s, triple, err := unquote(val.tok) + if err != nil { + in.Error(fmt.Sprint(err)) + } + val.str = s + val.triple = triple + return _STRING + } + + // Checked all punctuation. Must be identifier token. + if c := in.peekRune(); !isIdent(c) { + in.Error(fmt.Sprintf("unexpected input character %#q", c)) + } + + if !tables.FormatBzlFiles { + // Look for raw Python block (class, def, if, etc at beginning of line) and pass through. + if in.depth == 0 && in.pos.LineRune == 1 && hasPythonPrefix(in.remaining) { + // Find end of Python block and advance input beyond it. + // Have to loop calling readRune in order to maintain line number info. + rest := in.skipStmt(in.remaining) + for len(in.remaining) > len(rest) { + in.readRune() + } + return _PYTHON + } + } + + // Scan over alphanumeric identifier. + for { + c := in.peekRune() + if !isIdent(c) { + break + } + in.readRune() + } + + // Call endToken to set val.tok to identifier we just scanned, + // so we can look to see if val.tok is a keyword. + in.endToken(val) + if k := keywordToken[val.tok]; k != 0 { + return k + } + + return _IDENT +} + +// isIdent reports whether c is an identifier rune. +// We treat all non-ASCII runes as identifier runes. 
+func isIdent(c int) bool { + return '0' <= c && c <= '9' || + 'A' <= c && c <= 'Z' || + 'a' <= c && c <= 'z' || + c == '_' || + c >= 0x80 +} + +// keywordToken records the special tokens for +// strings that should not be treated as ordinary identifiers. +var keywordToken = map[string]int{ + "and": _AND, + "for": _FOR, + "if": _IF, + "else": _ELSE, + "in": _IN, + "is": _IS, + "lambda": _LAMBDA, + "not": _NOT, + "or": _OR, + "def": _DEF, +} + +// Python scanning. +// About 1% of BUILD files embed arbitrary Python into the file. +// We do not attempt to parse it. Instead, we lex just enough to scan +// beyond it, treating the Python block as an unintepreted blob. + +// hasPythonPrefix reports whether p begins with a keyword that would +// introduce an uninterpreted Python block. +func hasPythonPrefix(p []byte) bool { + if tables.FormatBzlFiles { + return false + } + + for _, pre := range prefixes { + if hasPrefixSpace(p, pre) { + return true + } + } + return false +} + +// These keywords introduce uninterpreted Python blocks. +var prefixes = []string{ + "assert", + "class", + "def", + "del", + "for", + "if", + "try", +} + +// hasPrefixSpace reports whether p begins with pre followed by a space or colon. +func hasPrefixSpace(p []byte, pre string) bool { + if len(p) <= len(pre) || p[len(pre)] != ' ' && p[len(pre)] != '\t' && p[len(pre)] != ':' { + return false + } + for i := range pre { + if p[i] != pre[i] { + return false + } + } + return true +} + +func isBlankOrComment(b []byte) bool { + for _, c := range b { + if c == '#' || c == '\n' { + return true + } + if c != ' ' && c != '\t' && c != '\r' { + return false + } + } + return true +} + +// hasPythonContinuation reports whether p begins with a keyword that +// continues an uninterpreted Python block. +func hasPythonContinuation(p []byte) bool { + for _, pre := range continuations { + if hasPrefixSpace(p, pre) { + return true + } + } + return false +} + +// These keywords continue uninterpreted Python blocks. 
+var continuations = []string{ + "except", + "else", +} + +// skipStmt returns the data remaining after the uninterpreted +// Python block beginning at p. It does not advance the input position. +// (The only reason for the input receiver is to be able to call in.Error.) +func (in *input) skipStmt(p []byte) []byte { + quote := byte(0) // if non-zero, the kind of quote we're in + tripleQuote := false // if true, the quote is a triple quote + depth := 0 // nesting depth for ( ) [ ] { } + var rest []byte // data after the Python block + + defer func() { + if quote != 0 { + in.Error("EOF scanning Python quoted string") + } + }() + + // Scan over input one byte at a time until we find + // an unindented, non-blank, non-comment line + // outside quoted strings and brackets. + for i := 0; i < len(p); i++ { + c := p[i] + if quote != 0 && c == quote && !tripleQuote { + quote = 0 + continue + } + if quote != 0 && c == quote && tripleQuote && i+2 < len(p) && p[i+1] == quote && p[i+2] == quote { + i += 2 + quote = 0 + tripleQuote = false + continue + } + if quote != 0 { + if c == '\\' { + i++ // skip escaped char + } + continue + } + if c == '\'' || c == '"' { + if i+2 < len(p) && p[i+1] == c && p[i+2] == c { + quote = c + tripleQuote = true + i += 2 + continue + } + quote = c + continue + } + + if depth == 0 && i > 0 && p[i-1] == '\n' && (i < 2 || p[i-2] != '\\') { + // Possible stopping point. Save the earliest one we find. + if rest == nil { + rest = p[i:] + } + + if tables.FormatBzlFiles { + // In the bzl files mode we only care about the end of the statement, we've found it. + return rest + } + // In the legacy mode we need to find where the current block ends + if !isBlankOrComment(p[i:]) { + if !hasPythonContinuation(p[i:]) && c != ' ' && c != '\t' { + // Yes, stop here. + return rest + } + // Not a stopping point after all. + rest = nil + } + } + + switch c { + case '#': + // Skip comment. 
+ for i < len(p) && p[i] != '\n' { + i++ + } + + case '(', '[', '{': + depth++ + + case ')', ']', '}': + depth-- + } + } + return rest +} + +// Comment assignment. +// We build two lists of all subexpressions, preorder and postorder. +// The preorder list is ordered by start location, with outer expressions first. +// The postorder list is ordered by end location, with outer expressions last. +// We use the preorder list to assign each whole-line comment to the syntax +// immediately following it, and we use the postorder list to assign each +// end-of-line comment to the syntax immediately preceding it. + +// order walks the expression adding it and its subexpressions to the +// preorder and postorder lists. +func (in *input) order(v Expr) { + if v != nil { + in.pre = append(in.pre, v) + } + switch v := v.(type) { + default: + panic(fmt.Errorf("order: unexpected type %T", v)) + case nil: + // nothing + case *End: + // nothing + case *File: + for _, stmt := range v.Stmt { + in.order(stmt) + } + case *CommentBlock: + // nothing + case *CallExpr: + in.order(v.X) + for _, x := range v.List { + in.order(x) + } + in.order(&v.End) + case *PythonBlock: + // nothing + case *LiteralExpr: + // nothing + case *StringExpr: + // nothing + case *DotExpr: + in.order(v.X) + case *ListExpr: + for _, x := range v.List { + in.order(x) + } + in.order(&v.End) + case *ListForExpr: + in.order(v.X) + for _, c := range v.For { + in.order(c) + } + in.order(&v.End) + case *SetExpr: + for _, x := range v.List { + in.order(x) + } + in.order(&v.End) + case *ForClauseWithIfClausesOpt: + in.order(v.For) + for _, c := range v.Ifs { + in.order(c) + } + case *ForClause: + for _, name := range v.Var { + in.order(name) + } + in.order(v.Expr) + case *IfClause: + in.order(v.Cond) + case *KeyValueExpr: + in.order(v.Key) + in.order(v.Value) + case *DictExpr: + for _, x := range v.List { + in.order(x) + } + in.order(&v.End) + case *TupleExpr: + for _, x := range v.List { + in.order(x) + } + 
in.order(&v.End) + case *UnaryExpr: + in.order(v.X) + case *BinaryExpr: + in.order(v.X) + in.order(v.Y) + case *ConditionalExpr: + in.order(v.Then) + in.order(v.Test) + in.order(v.Else) + case *ParenExpr: + in.order(v.X) + in.order(&v.End) + case *SliceExpr: + in.order(v.X) + in.order(v.From) + in.order(v.To) + in.order(v.Step) + case *IndexExpr: + in.order(v.X) + in.order(v.Y) + case *LambdaExpr: + for _, name := range v.Var { + in.order(name) + } + in.order(v.Expr) + case *FuncDef: + for _, x := range v.Args { + in.order(x) + } + for _, x := range v.Body.Statements { + in.order(x) + } + } + if v != nil { + in.post = append(in.post, v) + } +} + +// assignComments attaches comments to nearby syntax. +func (in *input) assignComments() { + // Generate preorder and postorder lists. + in.order(in.file) + + // Split into whole-line comments and suffix comments. + var line, suffix []Comment + for _, com := range in.comments { + if com.Suffix { + suffix = append(suffix, com) + } else { + line = append(line, com) + } + } + + // Assign line comments to syntax immediately following. + for _, x := range in.pre { + start, _ := x.Span() + xcom := x.Comment() + for len(line) > 0 && start.Byte >= line[0].Start.Byte { + xcom.Before = append(xcom.Before, line[0]) + line = line[1:] + } + } + + // Remaining line comments go at end of file. + in.file.After = append(in.file.After, line...) + + // Assign suffix comments to syntax immediately before. + for i := len(in.post) - 1; i >= 0; i-- { + x := in.post[i] + + // Do not assign suffix comments to file + switch x.(type) { + case *File: + continue + } + + _, end := x.Span() + xcom := x.Comment() + for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte { + xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1]) + suffix = suffix[:len(suffix)-1] + } + } + + // We assigned suffix comments in reverse. + // If multiple suffix comments were appended to the same + // expression node, they are now in reverse. Fix that. 
+ for _, x := range in.post { + reverseComments(x.Comment().Suffix) + } + + // Remaining suffix comments go at beginning of file. + in.file.Before = append(in.file.Before, suffix...) +} + +// reverseComments reverses the []Comment list. +func reverseComments(list []Comment) { + for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 { + list[i], list[j] = list[j], list[i] + } +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/parse.y b/vendor/github.com/bazelbuild/buildtools/build/parse.y new file mode 100644 index 00000000000..3a05452613a --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/parse.y @@ -0,0 +1,720 @@ +// BUILD file parser. + +// This is a yacc grammar. Its lexer is in lex.go. +// +// For a good introduction to writing yacc grammars, see +// Kernighan and Pike's book The Unix Programming Environment. +// +// The definitive yacc manual is +// Stephen C. Johnson and Ravi Sethi, "Yacc: A Parser Generator", +// online at http://plan9.bell-labs.com/sys/doc/yacc.pdf. + +%{ +package build +%} + +// The generated parser puts these fields in a struct named yySymType. +// (The name %union is historical, but it is inaccurate for Go.) +%union { + // input tokens + tok string // raw input syntax + str string // decoding of quoted string + pos Position // position of token + triple bool // was string triple quoted? + + // partial syntax trees + expr Expr + exprs []Expr + forc *ForClause + ifs []*IfClause + forifs *ForClauseWithIfClausesOpt + forsifs []*ForClauseWithIfClausesOpt + string *StringExpr + strings []*StringExpr + block CodeBlock + + // supporting information + comma Position // position of trailing comma in list, if present + lastRule Expr // most recent rule, to attach line comments to +} + +// These declarations set the type for a $ reference ($$, $1, $2, ...) +// based on the kind of symbol it refers to. Other fields can be referred +// to explicitly, as in $1. 
+// +// %token is for input tokens generated by the lexer. +// %type is for higher-level grammar rules defined here. +// +// It is possible to put multiple tokens per line, but it is easier to +// keep ordered using a sparser one-per-line list. + +%token '%' +%token '(' +%token ')' +%token '*' +%token '+' +%token ',' +%token '-' +%token '.' +%token '/' +%token ':' +%token '<' +%token '=' +%token '>' +%token '[' +%token ']' +%token '{' +%token '}' + +// By convention, yacc token names are all caps. +// However, we do not want to export them from the Go package +// we are creating, so prefix them all with underscores. + +%token _ADDEQ // operator += +%token _AND // keyword and +%token _COMMENT // top-level # comment +%token _EOF // end of file +%token _EQ // operator == +%token _FOR // keyword for +%token _GE // operator >= +%token _IDENT // non-keyword identifier or number +%token _IF // keyword if +%token _ELSE // keyword else +%token _IN // keyword in +%token _IS // keyword is +%token _LAMBDA // keyword lambda +%token _LE // operator <= +%token _NE // operator != +%token _NOT // keyword not +%token _OR // keyword or +%token _PYTHON // uninterpreted Python block +%token _STRING // quoted string +%token _DEF // keyword def +%token _INDENT // indentation +%token _UNINDENT // unindentation + +%type comma_opt +%type expr +%type expr_opt +%type primary_expr +%type exprs +%type exprs_opt +%type primary_exprs +%type for_clause +%type for_clause_with_if_clauses_opt +%type for_clauses_with_if_clauses_opt +%type ident +%type if_clauses_opt +%type stmts +%type stmt +%type keyvalue +%type keyvalues +%type keyvalues_no_comma +%type string +%type strings +%type block + +// Operator precedence. +// Operators listed lower in the table bind tighter. + +// We tag rules with this fake, low precedence to indicate +// that when the rule is involved in a shift/reduce +// conflict, we prefer that the parser shift (try for a longer parse). 
+// Shifting is the default resolution anyway, but stating it explicitly +// silences yacc's warning for that specific case. +%left ShiftInstead + +%left '\n' +%left _ASSERT +// '=' and '+=' have the lowest precedence +// e.g. "x = a if c > 0 else 'bar'" +// followed by +// 'if' and 'else' which have lower precedence than all other operators. +// e.g. "a, b if c > 0 else 'foo'" is either a tuple of (a,b) or 'foo' +// and not a tuple of "(a, (b if ... ))" +%left '=' _ADDEQ +%left _IF _ELSE +%left ',' +%left ':' +%left _IN _NOT _IS +%left _OR +%left _AND +%left '<' '>' _EQ _NE _LE _GE +%left '+' '-' +%left '*' '/' '%' +%left '.' '[' '(' +%right _UNARY +%left _STRING + +%% + +// Grammar rules. +// +// A note on names: if foo is a rule, then foos is a sequence of foos +// (with interleaved commas or other syntax as appropriate) +// and foo_opt is an optional foo. + +file: + stmts _EOF + { + yylex.(*input).file = &File{Stmt: $1} + return 0 + } + +block: + _INDENT stmts _UNINDENT + { + $$ = CodeBlock{ + Start: $1, + Statements: $2, + End: End{Pos: $3}, + } + } + +stmts: + { + $$ = nil + $$ = nil + } +| stmts stmt comma_opt semi_opt + { + // If this statement follows a comment block, + // attach the comments to the statement. + if cb, ok := $1.(*CommentBlock); ok { + $$ = $1 + $$[len($1)-1] = $2 + $2.Comment().Before = cb.After + $$ = $2 + break + } + + // Otherwise add to list. + $$ = append($1, $2) + $$ = $2 + + // Consider this input: + // + // foo() + // # bar + // baz() + // + // If we've just parsed baz(), the # bar is attached to + // foo() as an After comment. Make it a Before comment + // for baz() instead. + if x := $1; x != nil { + com := x.Comment() + $2.Comment().Before = com.After + com.After = nil + } + } +| stmts '\n' + { + // Blank line; sever last rule from future comments. 
+ $$ = $1 + $$ = nil + } +| stmts _COMMENT + { + $$ = $1 + $$ = $1 + if $$ == nil { + cb := &CommentBlock{Start: $2} + $$ = append($$, cb) + $$ = cb + } + com := $$.Comment() + com.After = append(com.After, Comment{Start: $2, Token: $2}) + } + +stmt: + expr %prec ShiftInstead +| _PYTHON + { + $$ = &PythonBlock{Start: $1, Token: $1} + } + +semi_opt: +| semi_opt ';' + +primary_expr: + ident +| primary_expr '.' _IDENT + { + $$ = &DotExpr{ + X: $1, + Dot: $2, + NamePos: $3, + Name: $3, + } + } +| primary_expr '(' exprs_opt ')' + { + $$ = &CallExpr{ + X: $1, + ListStart: $2, + List: $3, + End: End{Pos: $4}, + ForceCompact: forceCompact($2, $3, $4), + ForceMultiLine: forceMultiLine($2, $3, $4), + } + } +| primary_expr '[' expr ']' + { + $$ = &IndexExpr{ + X: $1, + IndexStart: $2, + Y: $3, + End: $4, + } + } +| primary_expr '[' expr_opt ':' expr_opt ']' + { + $$ = &SliceExpr{ + X: $1, + SliceStart: $2, + From: $3, + FirstColon: $4, + To: $5, + End: $6, + } + } +| primary_expr '[' expr_opt ':' expr_opt ':' expr_opt ']' + { + $$ = &SliceExpr{ + X: $1, + SliceStart: $2, + From: $3, + FirstColon: $4, + To: $5, + SecondColon: $6, + Step: $7, + End: $8, + } + } +| primary_expr '(' expr for_clauses_with_if_clauses_opt ')' + { + $$ = &CallExpr{ + X: $1, + ListStart: $2, + List: []Expr{ + &ListForExpr{ + Brack: "", + Start: $2, + X: $3, + For: $4, + End: End{Pos: $5}, + }, + }, + End: End{Pos: $5}, + } + } +| strings %prec ShiftInstead + { + if len($1) == 1 { + $$ = $1[0] + break + } + $$ = $1[0] + for _, x := range $1[1:] { + _, end := $$.Span() + $$ = binary($$, end, "+", x) + } + } +| '[' exprs_opt ']' + { + $$ = &ListExpr{ + Start: $1, + List: $2, + Comma: $2, + End: End{Pos: $3}, + ForceMultiLine: forceMultiLine($1, $2, $3), + } + } +| '[' expr for_clauses_with_if_clauses_opt ']' + { + exprStart, _ := $2.Span() + $$ = &ListForExpr{ + Brack: "[]", + Start: $1, + X: $2, + For: $3, + End: End{Pos: $4}, + ForceMultiLine: $1.Line != exprStart.Line, + } + } +| '(' expr 
for_clauses_with_if_clauses_opt ')' + { + exprStart, _ := $2.Span() + $$ = &ListForExpr{ + Brack: "()", + Start: $1, + X: $2, + For: $3, + End: End{Pos: $4}, + ForceMultiLine: $1.Line != exprStart.Line, + } + } +| '{' keyvalue for_clauses_with_if_clauses_opt '}' + { + exprStart, _ := $2.Span() + $$ = &ListForExpr{ + Brack: "{}", + Start: $1, + X: $2, + For: $3, + End: End{Pos: $4}, + ForceMultiLine: $1.Line != exprStart.Line, + } + } +| '{' keyvalues '}' + { + $$ = &DictExpr{ + Start: $1, + List: $2, + Comma: $2, + End: End{Pos: $3}, + ForceMultiLine: forceMultiLine($1, $2, $3), + } + } +| '{' exprs_opt '}' + { + $$ = &SetExpr{ + Start: $1, + List: $2, + Comma: $2, + End: End{Pos: $3}, + ForceMultiLine: forceMultiLine($1, $2, $3), + } + } +| '(' exprs_opt ')' + { + if len($2) == 1 && $2.Line == 0 { + // Just a parenthesized expression, not a tuple. + $$ = &ParenExpr{ + Start: $1, + X: $2[0], + End: End{Pos: $3}, + ForceMultiLine: forceMultiLine($1, $2, $3), + } + } else { + $$ = &TupleExpr{ + Start: $1, + List: $2, + Comma: $2, + End: End{Pos: $3}, + ForceCompact: forceCompact($1, $2, $3), + ForceMultiLine: forceMultiLine($1, $2, $3), + } + } + } +| '-' primary_expr %prec _UNARY { $$ = unary($1, $1, $2) } + +expr: + primary_expr +| _LAMBDA exprs ':' expr + { + $$ = &LambdaExpr{ + Lambda: $1, + Var: $2, + Colon: $3, + Expr: $4, + } + } +| _NOT expr %prec _UNARY { $$ = unary($1, $1, $2) } +| '*' expr %prec _UNARY { $$ = unary($1, $1, $2) } +| expr '*' expr { $$ = binary($1, $2, $2, $3) } +| expr '%' expr { $$ = binary($1, $2, $2, $3) } +| expr '/' expr { $$ = binary($1, $2, $2, $3) } +| expr '+' expr { $$ = binary($1, $2, $2, $3) } +| expr '-' expr { $$ = binary($1, $2, $2, $3) } +| expr '<' expr { $$ = binary($1, $2, $2, $3) } +| expr '>' expr { $$ = binary($1, $2, $2, $3) } +| expr _EQ expr { $$ = binary($1, $2, $2, $3) } +| expr _LE expr { $$ = binary($1, $2, $2, $3) } +| expr _NE expr { $$ = binary($1, $2, $2, $3) } +| expr _GE expr { $$ = binary($1, $2, $2, $3) 
} +| expr '=' expr { $$ = binary($1, $2, $2, $3) } +| expr _ADDEQ expr { $$ = binary($1, $2, $2, $3) } +| expr _IN expr { $$ = binary($1, $2, $2, $3) } +| expr _NOT _IN expr { $$ = binary($1, $2, "not in", $4) } +| expr _OR expr { $$ = binary($1, $2, $2, $3) } +| expr _AND expr { $$ = binary($1, $2, $2, $3) } +| expr _IS expr + { + if b, ok := $3.(*UnaryExpr); ok && b.Op == "not" { + $$ = binary($1, $2, "is not", b.X) + } else { + $$ = binary($1, $2, $2, $3) + } + } +| expr _IF expr _ELSE expr + { + $$ = &ConditionalExpr{ + Then: $1, + IfStart: $2, + Test: $3, + ElseStart: $4, + Else: $5, + } + } +| _DEF _IDENT '(' exprs_opt ')' ':' block + // TODO: support one-line function definitions + { + $$ = &FuncDef{ + Start: $1, + Name: $2, + ListStart: $3, + Args: $4, + Body: $7, + End: $7.End, + ForceCompact: forceCompact($3, $4, $5), + ForceMultiLine: forceMultiLine($3, $4, $5), + } + } + +expr_opt: + { + $$ = nil + } +| expr + +// comma_opt is an optional comma. If the comma is present, +// the rule's value is the position of the comma. Otherwise +// the rule's value is the zero position. Tracking this +// lets us distinguish (x) and (x,). 
+comma_opt: + { + $$ = Position{} + } +| ',' + +keyvalue: + expr ':' expr { + $$ = &KeyValueExpr{ + Key: $1, + Colon: $2, + Value: $3, + } + } + +keyvalues_no_comma: + keyvalue + { + $$ = []Expr{$1} + } +| keyvalues_no_comma ',' keyvalue + { + $$ = append($1, $3) + } + +keyvalues: + keyvalues_no_comma + { + $$ = $1 + } +| keyvalues_no_comma ',' + { + $$ = $1 + } + +exprs: + expr + { + $$ = []Expr{$1} + } +| exprs ',' expr + { + $$ = append($1, $3) + } + +exprs_opt: + { + $$, $$ = nil, Position{} + } +| exprs comma_opt + { + $$, $$ = $1, $2 + } + +primary_exprs: + primary_expr + { + $$ = []Expr{$1} + } +| primary_exprs ',' primary_expr + { + $$ = append($1, $3) + } + +string: + _STRING + { + $$ = &StringExpr{ + Start: $1, + Value: $1, + TripleQuote: $1, + End: $1.add($1), + Token: $1, + } + } + +strings: + string + { + $$ = []*StringExpr{$1} + } +| strings string + { + $$ = append($1, $2) + } + +ident: + _IDENT + { + $$ = &LiteralExpr{Start: $1, Token: $1} + } + +for_clause: + _FOR primary_exprs _IN expr + { + $$ = &ForClause{ + For: $1, + Var: $2, + In: $3, + Expr: $4, + } + } + +for_clause_with_if_clauses_opt: + for_clause if_clauses_opt { + $$ = &ForClauseWithIfClausesOpt{ + For: $1, + Ifs: $2, + } + } + +for_clauses_with_if_clauses_opt: + for_clause_with_if_clauses_opt + { + $$ = []*ForClauseWithIfClausesOpt{$1} + } +| for_clauses_with_if_clauses_opt for_clause_with_if_clauses_opt { + $$ = append($1, $2) + } + +if_clauses_opt: + { + $$ = nil + } +| if_clauses_opt _IF expr + { + $$ = append($1, &IfClause{ + If: $2, + Cond: $3, + }) + } + +%% + +// Go helper code. + +// unary returns a unary expression with the given +// position, operator, and subexpression. +func unary(pos Position, op string, x Expr) Expr { + return &UnaryExpr{ + OpStart: pos, + Op: op, + X: x, + } +} + +// binary returns a binary expression with the given +// operands, position, and operator. 
+func binary(x Expr, pos Position, op string, y Expr) Expr { + _, xend := x.Span() + ystart, _ := y.Span() + return &BinaryExpr{ + X: x, + OpStart: pos, + Op: op, + LineBreak: xend.Line < ystart.Line, + Y: y, + } +} + +// forceCompact returns the setting for the ForceCompact field for a call or tuple. +// +// NOTE 1: The field is called ForceCompact, not ForceSingleLine, +// because it only affects the formatting associated with the call or tuple syntax, +// not the formatting of the arguments. For example: +// +// call([ +// 1, +// 2, +// 3, +// ]) +// +// is still a compact call even though it runs on multiple lines. +// +// In contrast the multiline form puts a linebreak after the (. +// +// call( +// [ +// 1, +// 2, +// 3, +// ], +// ) +// +// NOTE 2: Because of NOTE 1, we cannot use start and end on the +// same line as a signal for compact mode: the formatting of an +// embedded list might move the end to a different line, which would +// then look different on rereading and cause buildifier not to be +// idempotent. Instead, we have to look at properties guaranteed +// to be preserved by the reformatting, namely that the opening +// paren and the first expression are on the same line and that +// each subsequent expression begins on the same line as the last +// one ended (no line breaks after comma). +func forceCompact(start Position, list []Expr, end Position) bool { + if len(list) <= 1 { + // The call or tuple will probably be compact anyway; don't force it. + return false + } + + // If there are any named arguments or non-string, non-literal + // arguments, cannot force compact mode. + line := start.Line + for _, x := range list { + start, end := x.Span() + if start.Line != line { + return false + } + line = end.Line + switch x.(type) { + case *LiteralExpr, *StringExpr, *UnaryExpr: + // ok + default: + return false + } + } + return end.Line == line +} + +// forceMultiLine returns the setting for the ForceMultiLine field. 
+func forceMultiLine(start Position, list []Expr, end Position) bool { + if len(list) > 1 { + // The call will be multiline anyway, because it has multiple elements. Don't force it. + return false + } + + if len(list) == 0 { + // Empty list: use position of brackets. + return start.Line != end.Line + } + + // Single-element list. + // Check whether opening bracket is on different line than beginning of + // element, or closing bracket is on different line than end of element. + elemStart, elemEnd := list[0].Span() + return start.Line != elemStart.Line || end.Line != elemEnd.Line +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/parse.y.go b/vendor/github.com/bazelbuild/buildtools/build/parse.y.go new file mode 100755 index 00000000000..53a57f52725 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/parse.y.go @@ -0,0 +1,1343 @@ +//line build/parse.y:13 +package build + +import __yyfmt__ "fmt" + +//line build/parse.y:13 +//line build/parse.y:18 +type yySymType struct { + yys int + // input tokens + tok string // raw input syntax + str string // decoding of quoted string + pos Position // position of token + triple bool // was string triple quoted? 
+ + // partial syntax trees + expr Expr + exprs []Expr + forc *ForClause + ifs []*IfClause + forifs *ForClauseWithIfClausesOpt + forsifs []*ForClauseWithIfClausesOpt + string *StringExpr + strings []*StringExpr + block CodeBlock + + // supporting information + comma Position // position of trailing comma in list, if present + lastRule Expr // most recent rule, to attach line comments to +} + +const _ADDEQ = 57346 +const _AND = 57347 +const _COMMENT = 57348 +const _EOF = 57349 +const _EQ = 57350 +const _FOR = 57351 +const _GE = 57352 +const _IDENT = 57353 +const _IF = 57354 +const _ELSE = 57355 +const _IN = 57356 +const _IS = 57357 +const _LAMBDA = 57358 +const _LE = 57359 +const _NE = 57360 +const _NOT = 57361 +const _OR = 57362 +const _PYTHON = 57363 +const _STRING = 57364 +const _DEF = 57365 +const _INDENT = 57366 +const _UNINDENT = 57367 +const ShiftInstead = 57368 +const _ASSERT = 57369 +const _UNARY = 57370 + +var yyToknames = [...]string{ + "$end", + "error", + "$unk", + "'%'", + "'('", + "')'", + "'*'", + "'+'", + "','", + "'-'", + "'.'", + "'/'", + "':'", + "'<'", + "'='", + "'>'", + "'['", + "']'", + "'{'", + "'}'", + "_ADDEQ", + "_AND", + "_COMMENT", + "_EOF", + "_EQ", + "_FOR", + "_GE", + "_IDENT", + "_IF", + "_ELSE", + "_IN", + "_IS", + "_LAMBDA", + "_LE", + "_NE", + "_NOT", + "_OR", + "_PYTHON", + "_STRING", + "_DEF", + "_INDENT", + "_UNINDENT", + "ShiftInstead", + "'\\n'", + "_ASSERT", + "_UNARY", + "';'", +} +var yyStatenames = [...]string{} + +const yyEofCode = 1 +const yyErrCode = 2 +const yyInitialStackSize = 16 + +//line build/parse.y:617 + +// Go helper code. + +// unary returns a unary expression with the given +// position, operator, and subexpression. +func unary(pos Position, op string, x Expr) Expr { + return &UnaryExpr{ + OpStart: pos, + Op: op, + X: x, + } +} + +// binary returns a binary expression with the given +// operands, position, and operator. 
+func binary(x Expr, pos Position, op string, y Expr) Expr { + _, xend := x.Span() + ystart, _ := y.Span() + return &BinaryExpr{ + X: x, + OpStart: pos, + Op: op, + LineBreak: xend.Line < ystart.Line, + Y: y, + } +} + +// forceCompact returns the setting for the ForceCompact field for a call or tuple. +// +// NOTE 1: The field is called ForceCompact, not ForceSingleLine, +// because it only affects the formatting associated with the call or tuple syntax, +// not the formatting of the arguments. For example: +// +// call([ +// 1, +// 2, +// 3, +// ]) +// +// is still a compact call even though it runs on multiple lines. +// +// In contrast the multiline form puts a linebreak after the (. +// +// call( +// [ +// 1, +// 2, +// 3, +// ], +// ) +// +// NOTE 2: Because of NOTE 1, we cannot use start and end on the +// same line as a signal for compact mode: the formatting of an +// embedded list might move the end to a different line, which would +// then look different on rereading and cause buildifier not to be +// idempotent. Instead, we have to look at properties guaranteed +// to be preserved by the reformatting, namely that the opening +// paren and the first expression are on the same line and that +// each subsequent expression begins on the same line as the last +// one ended (no line breaks after comma). +func forceCompact(start Position, list []Expr, end Position) bool { + if len(list) <= 1 { + // The call or tuple will probably be compact anyway; don't force it. + return false + } + + // If there are any named arguments or non-string, non-literal + // arguments, cannot force compact mode. + line := start.Line + for _, x := range list { + start, end := x.Span() + if start.Line != line { + return false + } + line = end.Line + switch x.(type) { + case *LiteralExpr, *StringExpr, *UnaryExpr: + // ok + default: + return false + } + } + return end.Line == line +} + +// forceMultiLine returns the setting for the ForceMultiLine field. 
+func forceMultiLine(start Position, list []Expr, end Position) bool { + if len(list) > 1 { + // The call will be multiline anyway, because it has multiple elements. Don't force it. + return false + } + + if len(list) == 0 { + // Empty list: use position of brackets. + return start.Line != end.Line + } + + // Single-element list. + // Check whether opening bracket is on different line than beginning of + // element, or closing bracket is on different line than end of element. + elemStart, elemEnd := list[0].Span() + return start.Line != elemStart.Line || end.Line != elemEnd.Line +} + +//line yacctab:1 +var yyExca = [...]int{ + -1, 1, + 1, -1, + -2, 0, +} + +const yyNprod = 76 +const yyPrivate = 57344 + +var yyTokenNames []string +var yyStates []string + +const yyLast = 508 + +var yyAct = [...]int{ + + 7, 2, 88, 53, 9, 58, 93, 106, 94, 142, + 22, 48, 49, 50, 23, 79, 131, 54, 56, 61, + 84, 57, 60, 132, 63, 51, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, + 103, 80, 81, 82, 83, 133, 86, 87, 17, 85, + 12, 122, 127, 19, 121, 96, 116, 96, 143, 102, + 16, 45, 18, 99, 96, 101, 6, 44, 92, 105, + 98, 20, 96, 46, 96, 136, 10, 112, 21, 11, + 107, 8, 22, 13, 135, 145, 55, 5, 97, 134, + 113, 114, 48, 110, 52, 115, 26, 47, 114, 25, + 24, 120, 117, 90, 27, 123, 125, 89, 117, 126, + 117, 124, 17, 129, 12, 128, 130, 19, 109, 117, + 100, 91, 64, 1, 16, 141, 18, 15, 62, 59, + 6, 3, 137, 4, 139, 20, 129, 138, 140, 118, + 10, 14, 95, 11, 144, 8, 22, 13, 119, 26, + 0, 5, 25, 28, 0, 29, 0, 27, 104, 30, + 36, 31, 0, 0, 0, 26, 37, 41, 25, 28, + 32, 29, 35, 27, 43, 0, 38, 42, 0, 33, + 34, 39, 40, 26, 0, 0, 25, 28, 0, 29, + 0, 27, 0, 30, 36, 31, 0, 111, 0, 0, + 37, 41, 0, 0, 32, 0, 35, 0, 43, 0, + 38, 42, 0, 33, 34, 39, 40, 26, 0, 0, + 25, 28, 0, 29, 0, 27, 0, 30, 36, 31, + 0, 0, 0, 0, 37, 41, 0, 0, 32, 96, + 35, 0, 43, 0, 38, 42, 0, 33, 34, 39, + 40, 26, 0, 0, 25, 28, 0, 29, 0, 27, + 0, 30, 36, 31, 0, 0, 0, 0, 37, 41, + 0, 0, 32, 0, 35, 0, 43, 108, 38, 42, + 0, 33, 34, 39, 40, 
26, 0, 0, 25, 28, + 0, 29, 0, 27, 0, 30, 36, 31, 0, 0, + 0, 0, 37, 41, 0, 0, 32, 0, 35, 0, + 43, 0, 38, 42, 0, 33, 34, 39, 40, 26, + 0, 17, 25, 28, 0, 29, 19, 27, 0, 30, + 36, 31, 0, 16, 0, 18, 37, 41, 0, 0, + 32, 0, 35, 0, 20, 0, 38, 42, 0, 33, + 34, 39, 40, 26, 0, 22, 25, 28, 0, 29, + 0, 27, 0, 30, 0, 31, 0, 0, 0, 0, + 0, 41, 0, 0, 32, 0, 35, 0, 43, 0, + 38, 42, 0, 33, 34, 39, 40, 26, 0, 0, + 25, 28, 0, 29, 0, 27, 0, 30, 0, 31, + 0, 0, 0, 0, 0, 41, 0, 0, 32, 0, + 35, 17, 0, 12, 38, 42, 19, 33, 34, 39, + 40, 0, 0, 16, 0, 18, 0, 0, 0, 0, + 0, 0, 0, 0, 20, 0, 0, 0, 0, 10, + 0, 0, 11, 0, 26, 22, 13, 25, 28, 0, + 29, 0, 27, 0, 30, 0, 31, 0, 0, 0, + 26, 0, 41, 25, 28, 32, 29, 35, 27, 0, + 30, 0, 31, 0, 33, 34, 26, 40, 41, 25, + 28, 32, 29, 35, 27, 0, 30, 0, 31, 0, + 33, 34, 0, 0, 0, 0, 0, 32, 0, 35, + 0, 0, 0, 0, 0, 0, 33, 34, +} +var yyPact = [...]int{ + + -1000, -1000, 107, -1000, 91, -1000, -1000, 281, -1000, 56, + 406, 406, 406, -3, -1000, -29, 406, 406, 406, 316, + -1000, -1000, -1000, -1000, -1000, 406, 406, 406, 406, 406, + 406, 406, 406, 406, 406, 406, 406, 406, 406, -16, + 406, 406, 406, 406, -8, 406, 406, 94, 281, -1000, + -1000, 116, -1000, 50, 213, 79, 213, 114, 29, 39, + 20, 145, 60, -1000, -40, -1000, -1000, -1000, 92, 92, + 161, 161, 161, 161, 161, 161, 349, 349, 440, 406, + 456, 472, 440, 247, -1000, 112, 213, 179, 64, 406, + 406, 406, -1000, 38, -1000, -1000, 316, 406, -1000, 48, + -1000, 31, -1000, -1000, 406, 406, -1000, 440, 406, -1000, + 46, -1000, 406, 383, 281, 110, -1000, -1000, -13, 14, + 56, -1000, -1000, 281, -1000, 145, 383, -1000, 71, 281, + 62, 406, 316, 406, -1000, 406, -32, 315, 56, 315, + 40, -1000, -1000, -1000, 43, -1000, +} +var yyPgo = [...]int{ + + 0, 14, 0, 2, 4, 86, 3, 148, 142, 8, + 6, 141, 139, 1, 133, 5, 129, 128, 78, 127, + 125, 123, 122, +} +var yyR1 = [...]int{ + + 0, 21, 20, 13, 13, 13, 13, 14, 14, 22, + 22, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, + 2, 3, 3, 1, 1, 15, 17, 17, 16, 16, + 5, 5, 6, 6, 7, 7, 18, 19, 19, 11, + 8, 9, 10, 10, 12, 12, +} +var yyR2 = [...]int{ + + 0, 2, 3, 0, 4, 2, 2, 1, 1, 0, + 2, 1, 3, 4, 4, 6, 8, 5, 1, 3, + 4, 4, 4, 3, 3, 3, 2, 1, 4, 2, + 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 4, 3, 3, 3, 5, + 7, 0, 1, 0, 1, 3, 1, 3, 1, 2, + 1, 3, 0, 2, 1, 3, 1, 1, 2, 1, + 4, 2, 1, 2, 0, 3, +} +var yyChk = [...]int{ + + -1000, -21, -13, 24, -14, 44, 23, -2, 38, -4, + 33, 36, 7, 40, -11, -19, 17, 5, 19, 10, + 28, -18, 39, -1, 9, 7, 4, 12, 8, 10, + 14, 16, 25, 34, 35, 27, 15, 21, 31, 36, + 37, 22, 32, 29, 11, 5, 17, -5, -2, -2, + -2, 28, -18, -6, -2, -5, -2, -6, -15, -16, + -6, -2, -17, -4, -22, -2, -2, -2, -2, -2, + -2, -2, -2, -2, -2, -2, -2, -2, -2, 31, + -2, -2, -2, -2, 28, -6, -2, -2, -3, 13, + 9, 5, 18, -10, -9, -8, 26, 9, -1, -10, + 6, -10, 20, 20, 13, 9, 47, -2, 30, 6, + -10, 18, 13, -2, -2, -6, 18, -9, -12, -7, + -4, 6, 20, -2, -15, -2, -2, 6, -3, -2, + 6, 29, 9, 31, 18, 13, 13, -2, -4, -2, + -3, -20, 41, 18, -13, 42, +} +var yyDef = [...]int{ + + 3, -2, 0, 1, 53, 5, 6, 7, 8, 27, + 0, 0, 0, 0, 11, 18, 62, 62, 62, 0, + 69, 67, 66, 9, 54, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 62, 51, 0, 60, 29, + 30, 0, 68, 0, 60, 53, 60, 0, 56, 0, + 0, 60, 58, 26, 4, 31, 32, 33, 34, 35, + 36, 37, 38, 39, 40, 41, 42, 43, 44, 0, + 46, 47, 48, 0, 12, 0, 60, 52, 0, 0, + 0, 62, 19, 0, 72, 74, 0, 54, 63, 0, + 25, 0, 23, 24, 0, 59, 10, 45, 0, 13, + 0, 14, 51, 28, 61, 0, 20, 73, 71, 0, + 64, 21, 22, 55, 57, 0, 49, 17, 0, 52, + 0, 0, 0, 0, 15, 51, 0, 75, 65, 70, + 0, 50, 3, 16, 0, 2, +} +var yyTok1 = [...]int{ + + 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 44, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, + 5, 6, 7, 8, 9, 10, 11, 12, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 13, 47, + 14, 15, 16, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 17, 3, 18, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 
3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 19, 3, 20, +} +var yyTok2 = [...]int{ + + 2, 3, 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 45, 46, +} +var yyTok3 = [...]int{ + 0, +} + +var yyErrorMessages = [...]struct { + state int + token int + msg string +}{} + +//line yaccpar:1 + +/* parser for yacc output */ + +var ( + yyDebug = 0 + yyErrorVerbose = false +) + +type yyLexer interface { + Lex(lval *yySymType) int + Error(s string) +} + +type yyParser interface { + Parse(yyLexer) int + Lookahead() int +} + +type yyParserImpl struct { + lval yySymType + stack [yyInitialStackSize]yySymType + char int +} + +func (p *yyParserImpl) Lookahead() int { + return p.char +} + +func yyNewParser() yyParser { + return &yyParserImpl{} +} + +const yyFlag = -1000 + +func yyTokname(c int) string { + if c >= 1 && c-1 < len(yyToknames) { + if yyToknames[c-1] != "" { + return yyToknames[c-1] + } + } + return __yyfmt__.Sprintf("tok-%v", c) +} + +func yyStatname(s int) string { + if s >= 0 && s < len(yyStatenames) { + if yyStatenames[s] != "" { + return yyStatenames[s] + } + } + return __yyfmt__.Sprintf("state-%v", s) +} + +func yyErrorMessage(state, lookAhead int) string { + const TOKSTART = 4 + + if !yyErrorVerbose { + return "syntax error" + } + + for _, e := range yyErrorMessages { + if e.state == state && e.token == lookAhead { + return "syntax error: " + e.msg + } + } + + res := "syntax error: unexpected " + yyTokname(lookAhead) + + // To match Bison, suggest at most four expected tokens. + expected := make([]int, 0, 4) + + // Look for shiftable tokens. + base := yyPact[state] + for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { + if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + } + + if yyDef[state] == -2 { + i := 0 + for yyExca[i] != -1 || yyExca[i+1] != state { + i += 2 + } + + // Look for tokens that we accept or reduce. 
+ for i += 2; yyExca[i] >= 0; i += 2 { + tok := yyExca[i] + if tok < TOKSTART || yyExca[i+1] == 0 { + continue + } + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + + // If the default action is to accept or reduce, give up. + if yyExca[i+1] != 0 { + return res + } + } + + for i, tok := range expected { + if i == 0 { + res += ", expecting " + } else { + res += " or " + } + res += yyTokname(tok) + } + return res +} + +func yylex1(lex yyLexer, lval *yySymType) (char, token int) { + token = 0 + char = lex.Lex(lval) + if char <= 0 { + token = yyTok1[0] + goto out + } + if char < len(yyTok1) { + token = yyTok1[char] + goto out + } + if char >= yyPrivate { + if char < yyPrivate+len(yyTok2) { + token = yyTok2[char-yyPrivate] + goto out + } + } + for i := 0; i < len(yyTok3); i += 2 { + token = yyTok3[i+0] + if token == char { + token = yyTok3[i+1] + goto out + } + } + +out: + if token == 0 { + token = yyTok2[1] /* unknown char */ + } + if yyDebug >= 3 { + __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) + } + return char, token +} + +func yyParse(yylex yyLexer) int { + return yyNewParser().Parse(yylex) +} + +func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int { + var yyn int + var yyVAL yySymType + var yyDollar []yySymType + _ = yyDollar // silence set and not used + yyS := yyrcvr.stack[:] + + Nerrs := 0 /* number of errors */ + Errflag := 0 /* error recovery flag */ + yystate := 0 + yyrcvr.char = -1 + yytoken := -1 // yyrcvr.char translated into internal numbering + defer func() { + // Make sure we report no lookahead when not parsing. 
+ yystate = -1 + yyrcvr.char = -1 + yytoken = -1 + }() + yyp := -1 + goto yystack + +ret0: + return 0 + +ret1: + return 1 + +yystack: + /* put a state and value onto the stack */ + if yyDebug >= 4 { + __yyfmt__.Printf("char %v in %v\n", yyTokname(yytoken), yyStatname(yystate)) + } + + yyp++ + if yyp >= len(yyS) { + nyys := make([]yySymType, len(yyS)*2) + copy(nyys, yyS) + yyS = nyys + } + yyS[yyp] = yyVAL + yyS[yyp].yys = yystate + +yynewstate: + yyn = yyPact[yystate] + if yyn <= yyFlag { + goto yydefault /* simple state */ + } + if yyrcvr.char < 0 { + yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) + } + yyn += yytoken + if yyn < 0 || yyn >= yyLast { + goto yydefault + } + yyn = yyAct[yyn] + if yyChk[yyn] == yytoken { /* valid shift */ + yyrcvr.char = -1 + yytoken = -1 + yyVAL = yyrcvr.lval + yystate = yyn + if Errflag > 0 { + Errflag-- + } + goto yystack + } + +yydefault: + /* default state action */ + yyn = yyDef[yystate] + if yyn == -2 { + if yyrcvr.char < 0 { + yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) + } + + /* look through exception table */ + xi := 0 + for { + if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { + break + } + xi += 2 + } + for xi += 2; ; xi += 2 { + yyn = yyExca[xi+0] + if yyn < 0 || yyn == yytoken { + break + } + } + yyn = yyExca[xi+1] + if yyn < 0 { + goto ret0 + } + } + if yyn == 0 { + /* error ... attempt to resume parsing */ + switch Errflag { + case 0: /* brand new error */ + yylex.Error(yyErrorMessage(yystate, yytoken)) + Nerrs++ + if yyDebug >= 1 { + __yyfmt__.Printf("%s", yyStatname(yystate)) + __yyfmt__.Printf(" saw %s\n", yyTokname(yytoken)) + } + fallthrough + + case 1, 2: /* incompletely recovered error ... 
try again */ + Errflag = 3 + + /* find a state where "error" is a legal shift action */ + for yyp >= 0 { + yyn = yyPact[yyS[yyp].yys] + yyErrCode + if yyn >= 0 && yyn < yyLast { + yystate = yyAct[yyn] /* simulate a shift of "error" */ + if yyChk[yystate] == yyErrCode { + goto yystack + } + } + + /* the current p has no shift on "error", pop stack */ + if yyDebug >= 2 { + __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys) + } + yyp-- + } + /* there is no state on the stack with an error shift ... abort */ + goto ret1 + + case 3: /* no shift yet; clobber input char */ + if yyDebug >= 2 { + __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yytoken)) + } + if yytoken == yyEofCode { + goto ret1 + } + yyrcvr.char = -1 + yytoken = -1 + goto yynewstate /* try again in the same state */ + } + } + + /* reduction by production yyn */ + if yyDebug >= 2 { + __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate)) + } + + yynt := yyn + yypt := yyp + _ = yypt // guard against "declared and not used" + + yyp -= yyR2[yyn] + // yyp is now the index of $0. Perform the default action. Iff the + // reduced production is ε, $1 is possibly out of range. 
+ if yyp+1 >= len(yyS) { + nyys := make([]yySymType, len(yyS)*2) + copy(nyys, yyS) + yyS = nyys + } + yyVAL = yyS[yyp+1] + + /* consult goto table to find next state */ + yyn = yyR1[yyn] + yyg := yyPgo[yyn] + yyj := yyg + yyS[yyp].yys + 1 + + if yyj >= yyLast { + yystate = yyAct[yyg] + } else { + yystate = yyAct[yyj] + if yyChk[yystate] != -yyn { + yystate = yyAct[yyg] + } + } + // dummy call; replaced with literal code + switch yynt { + + case 1: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:159 + { + yylex.(*input).file = &File{Stmt: yyDollar[1].exprs} + return 0 + } + case 2: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:166 + { + yyVAL.block = CodeBlock{ + Start: yyDollar[1].pos, + Statements: yyDollar[2].exprs, + End: End{Pos: yyDollar[3].pos}, + } + } + case 3: + yyDollar = yyS[yypt-0 : yypt+1] + //line build/parse.y:175 + { + yyVAL.exprs = nil + yyVAL.lastRule = nil + } + case 4: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:180 + { + // If this statement follows a comment block, + // attach the comments to the statement. + if cb, ok := yyDollar[1].lastRule.(*CommentBlock); ok { + yyVAL.exprs = yyDollar[1].exprs + yyVAL.exprs[len(yyDollar[1].exprs)-1] = yyDollar[2].expr + yyDollar[2].expr.Comment().Before = cb.After + yyVAL.lastRule = yyDollar[2].expr + break + } + + // Otherwise add to list. + yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[2].expr) + yyVAL.lastRule = yyDollar[2].expr + + // Consider this input: + // + // foo() + // # bar + // baz() + // + // If we've just parsed baz(), the # bar is attached to + // foo() as an After comment. Make it a Before comment + // for baz() instead. + if x := yyDollar[1].lastRule; x != nil { + com := x.Comment() + yyDollar[2].expr.Comment().Before = com.After + com.After = nil + } + } + case 5: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:211 + { + // Blank line; sever last rule from future comments. 
+ yyVAL.exprs = yyDollar[1].exprs + yyVAL.lastRule = nil + } + case 6: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:217 + { + yyVAL.exprs = yyDollar[1].exprs + yyVAL.lastRule = yyDollar[1].lastRule + if yyVAL.lastRule == nil { + cb := &CommentBlock{Start: yyDollar[2].pos} + yyVAL.exprs = append(yyVAL.exprs, cb) + yyVAL.lastRule = cb + } + com := yyVAL.lastRule.Comment() + com.After = append(com.After, Comment{Start: yyDollar[2].pos, Token: yyDollar[2].tok}) + } + case 8: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:232 + { + yyVAL.expr = &PythonBlock{Start: yyDollar[1].pos, Token: yyDollar[1].tok} + } + case 12: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:242 + { + yyVAL.expr = &DotExpr{ + X: yyDollar[1].expr, + Dot: yyDollar[2].pos, + NamePos: yyDollar[3].pos, + Name: yyDollar[3].tok, + } + } + case 13: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:251 + { + yyVAL.expr = &CallExpr{ + X: yyDollar[1].expr, + ListStart: yyDollar[2].pos, + List: yyDollar[3].exprs, + End: End{Pos: yyDollar[4].pos}, + ForceCompact: forceCompact(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), + ForceMultiLine: forceMultiLine(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), + } + } + case 14: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:262 + { + yyVAL.expr = &IndexExpr{ + X: yyDollar[1].expr, + IndexStart: yyDollar[2].pos, + Y: yyDollar[3].expr, + End: yyDollar[4].pos, + } + } + case 15: + yyDollar = yyS[yypt-6 : yypt+1] + //line build/parse.y:271 + { + yyVAL.expr = &SliceExpr{ + X: yyDollar[1].expr, + SliceStart: yyDollar[2].pos, + From: yyDollar[3].expr, + FirstColon: yyDollar[4].pos, + To: yyDollar[5].expr, + End: yyDollar[6].pos, + } + } + case 16: + yyDollar = yyS[yypt-8 : yypt+1] + //line build/parse.y:282 + { + yyVAL.expr = &SliceExpr{ + X: yyDollar[1].expr, + SliceStart: yyDollar[2].pos, + From: yyDollar[3].expr, + FirstColon: yyDollar[4].pos, + To: yyDollar[5].expr, + SecondColon: yyDollar[6].pos, + Step: 
yyDollar[7].expr, + End: yyDollar[8].pos, + } + } + case 17: + yyDollar = yyS[yypt-5 : yypt+1] + //line build/parse.y:295 + { + yyVAL.expr = &CallExpr{ + X: yyDollar[1].expr, + ListStart: yyDollar[2].pos, + List: []Expr{ + &ListForExpr{ + Brack: "", + Start: yyDollar[2].pos, + X: yyDollar[3].expr, + For: yyDollar[4].forsifs, + End: End{Pos: yyDollar[5].pos}, + }, + }, + End: End{Pos: yyDollar[5].pos}, + } + } + case 18: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:312 + { + if len(yyDollar[1].strings) == 1 { + yyVAL.expr = yyDollar[1].strings[0] + break + } + yyVAL.expr = yyDollar[1].strings[0] + for _, x := range yyDollar[1].strings[1:] { + _, end := yyVAL.expr.Span() + yyVAL.expr = binary(yyVAL.expr, end, "+", x) + } + } + case 19: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:324 + { + yyVAL.expr = &ListExpr{ + Start: yyDollar[1].pos, + List: yyDollar[2].exprs, + Comma: yyDollar[2].comma, + End: End{Pos: yyDollar[3].pos}, + ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), + } + } + case 20: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:334 + { + exprStart, _ := yyDollar[2].expr.Span() + yyVAL.expr = &ListForExpr{ + Brack: "[]", + Start: yyDollar[1].pos, + X: yyDollar[2].expr, + For: yyDollar[3].forsifs, + End: End{Pos: yyDollar[4].pos}, + ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, + } + } + case 21: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:346 + { + exprStart, _ := yyDollar[2].expr.Span() + yyVAL.expr = &ListForExpr{ + Brack: "()", + Start: yyDollar[1].pos, + X: yyDollar[2].expr, + For: yyDollar[3].forsifs, + End: End{Pos: yyDollar[4].pos}, + ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, + } + } + case 22: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:358 + { + exprStart, _ := yyDollar[2].expr.Span() + yyVAL.expr = &ListForExpr{ + Brack: "{}", + Start: yyDollar[1].pos, + X: yyDollar[2].expr, + For: yyDollar[3].forsifs, + End: End{Pos: 
yyDollar[4].pos}, + ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, + } + } + case 23: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:370 + { + yyVAL.expr = &DictExpr{ + Start: yyDollar[1].pos, + List: yyDollar[2].exprs, + Comma: yyDollar[2].comma, + End: End{Pos: yyDollar[3].pos}, + ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), + } + } + case 24: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:380 + { + yyVAL.expr = &SetExpr{ + Start: yyDollar[1].pos, + List: yyDollar[2].exprs, + Comma: yyDollar[2].comma, + End: End{Pos: yyDollar[3].pos}, + ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), + } + } + case 25: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:390 + { + if len(yyDollar[2].exprs) == 1 && yyDollar[2].comma.Line == 0 { + // Just a parenthesized expression, not a tuple. + yyVAL.expr = &ParenExpr{ + Start: yyDollar[1].pos, + X: yyDollar[2].exprs[0], + End: End{Pos: yyDollar[3].pos}, + ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), + } + } else { + yyVAL.expr = &TupleExpr{ + Start: yyDollar[1].pos, + List: yyDollar[2].exprs, + Comma: yyDollar[2].comma, + End: End{Pos: yyDollar[3].pos}, + ForceCompact: forceCompact(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), + ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), + } + } + } + case 26: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:410 + { + yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) + } + case 28: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:415 + { + yyVAL.expr = &LambdaExpr{ + Lambda: yyDollar[1].pos, + Var: yyDollar[2].exprs, + Colon: yyDollar[3].pos, + Expr: yyDollar[4].expr, + } + } + case 29: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:423 + { + yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) + } + case 30: + yyDollar = yyS[yypt-2 : 
yypt+1] + //line build/parse.y:424 + { + yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) + } + case 31: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:425 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 32: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:426 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 33: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:427 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 34: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:428 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 35: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:429 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 36: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:430 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 37: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:431 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 38: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:432 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 39: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:433 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 40: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:434 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 41: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:435 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, 
yyDollar[3].expr) + } + case 42: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:436 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 43: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:437 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 44: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:438 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 45: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:439 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, "not in", yyDollar[4].expr) + } + case 46: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:440 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 47: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:441 + { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + case 48: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:443 + { + if b, ok := yyDollar[3].expr.(*UnaryExpr); ok && b.Op == "not" { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, "is not", b.X) + } else { + yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) + } + } + case 49: + yyDollar = yyS[yypt-5 : yypt+1] + //line build/parse.y:451 + { + yyVAL.expr = &ConditionalExpr{ + Then: yyDollar[1].expr, + IfStart: yyDollar[2].pos, + Test: yyDollar[3].expr, + ElseStart: yyDollar[4].pos, + Else: yyDollar[5].expr, + } + } + case 50: + yyDollar = yyS[yypt-7 : yypt+1] + //line build/parse.y:462 + { + yyVAL.expr = &FuncDef{ + Start: yyDollar[1].pos, + Name: yyDollar[2].tok, + ListStart: yyDollar[3].pos, + Args: yyDollar[4].exprs, + Body: yyDollar[7].block, + End: yyDollar[7].block.End, + ForceCompact: forceCompact(yyDollar[3].pos, yyDollar[4].exprs, yyDollar[5].pos), + ForceMultiLine: 
forceMultiLine(yyDollar[3].pos, yyDollar[4].exprs, yyDollar[5].pos), + } + } + case 51: + yyDollar = yyS[yypt-0 : yypt+1] + //line build/parse.y:476 + { + yyVAL.expr = nil + } + case 53: + yyDollar = yyS[yypt-0 : yypt+1] + //line build/parse.y:486 + { + yyVAL.pos = Position{} + } + case 55: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:492 + { + yyVAL.expr = &KeyValueExpr{ + Key: yyDollar[1].expr, + Colon: yyDollar[2].pos, + Value: yyDollar[3].expr, + } + } + case 56: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:502 + { + yyVAL.exprs = []Expr{yyDollar[1].expr} + } + case 57: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:506 + { + yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) + } + case 58: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:512 + { + yyVAL.exprs = yyDollar[1].exprs + } + case 59: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:516 + { + yyVAL.exprs = yyDollar[1].exprs + } + case 60: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:522 + { + yyVAL.exprs = []Expr{yyDollar[1].expr} + } + case 61: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:526 + { + yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) + } + case 62: + yyDollar = yyS[yypt-0 : yypt+1] + //line build/parse.y:531 + { + yyVAL.exprs, yyVAL.comma = nil, Position{} + } + case 63: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:535 + { + yyVAL.exprs, yyVAL.comma = yyDollar[1].exprs, yyDollar[2].pos + } + case 64: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:541 + { + yyVAL.exprs = []Expr{yyDollar[1].expr} + } + case 65: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:545 + { + yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) + } + case 66: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:551 + { + yyVAL.string = &StringExpr{ + Start: yyDollar[1].pos, + Value: yyDollar[1].str, + TripleQuote: yyDollar[1].triple, + End: yyDollar[1].pos.add(yyDollar[1].tok), + Token: 
yyDollar[1].tok, + } + } + case 67: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:563 + { + yyVAL.strings = []*StringExpr{yyDollar[1].string} + } + case 68: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:567 + { + yyVAL.strings = append(yyDollar[1].strings, yyDollar[2].string) + } + case 69: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:573 + { + yyVAL.expr = &LiteralExpr{Start: yyDollar[1].pos, Token: yyDollar[1].tok} + } + case 70: + yyDollar = yyS[yypt-4 : yypt+1] + //line build/parse.y:579 + { + yyVAL.forc = &ForClause{ + For: yyDollar[1].pos, + Var: yyDollar[2].exprs, + In: yyDollar[3].pos, + Expr: yyDollar[4].expr, + } + } + case 71: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:589 + { + yyVAL.forifs = &ForClauseWithIfClausesOpt{ + For: yyDollar[1].forc, + Ifs: yyDollar[2].ifs, + } + } + case 72: + yyDollar = yyS[yypt-1 : yypt+1] + //line build/parse.y:598 + { + yyVAL.forsifs = []*ForClauseWithIfClausesOpt{yyDollar[1].forifs} + } + case 73: + yyDollar = yyS[yypt-2 : yypt+1] + //line build/parse.y:601 + { + yyVAL.forsifs = append(yyDollar[1].forsifs, yyDollar[2].forifs) + } + case 74: + yyDollar = yyS[yypt-0 : yypt+1] + //line build/parse.y:606 + { + yyVAL.ifs = nil + } + case 75: + yyDollar = yyS[yypt-3 : yypt+1] + //line build/parse.y:610 + { + yyVAL.ifs = append(yyDollar[1].ifs, &IfClause{ + If: yyDollar[2].pos, + Cond: yyDollar[3].expr, + }) + } + } + goto yystack /* stack new state and value */ +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/parse_test.go b/vendor/github.com/bazelbuild/buildtools/build/parse_test.go new file mode 100644 index 00000000000..62169860512 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/parse_test.go @@ -0,0 +1,222 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package build + +import ( + "encoding/json" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "reflect" + "testing" +) + +func TestParse(t *testing.T) { + for i, tt := range parseTests { + p, err := Parse("test", []byte(tt.in)) + if err != nil { + t.Errorf("#%d: %v", i, err) + continue + } + if tt.out != nil { + compare(t, p, tt.out) + } + } +} + +func TestParseTestdata(t *testing.T) { + // Test that files in the testdata directory can all be parsed. + // For this test we don't bother checking what the tree looks like. + // The printing tests will exercise that information. + testdata := os.Getenv("TEST_SRCDIR") + "/" + os.Getenv("TEST_WORKSPACE") + "/build/testdata" + outs, err := filepath.Glob(testdata + "/*") + if err != nil { + t.Fatal(err) + } + if len(outs) == 0 { + t.Fatal("Data set is empty:", testdata) + } + for _, out := range outs { + data, err := ioutil.ReadFile(out) + if err != nil { + t.Error(err) + continue + } + + _, err = Parse(filepath.Base(out), data) + if err != nil { + t.Error(err) + } + } +} + +// toJSON returns human-readable json for the given syntax tree. +// It is used as input to diff for comparing the actual syntax tree with the expected one. +func toJSON(v interface{}) string { + s, _ := json.MarshalIndent(v, "", "\t") + s = append(s, '\n') + return string(s) +} + +// diff returns the output of running diff on b1 and b2. 
+func diff(b1, b2 []byte) (data []byte, err error) { + f1, err := ioutil.TempFile("", "testdiff") + if err != nil { + return nil, err + } + defer os.Remove(f1.Name()) + defer f1.Close() + + f2, err := ioutil.TempFile("", "testdiff") + if err != nil { + return nil, err + } + defer os.Remove(f2.Name()) + defer f2.Close() + + f1.Write(b1) + f2.Write(b2) + + data, err = exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. + err = nil + } + return +} + +// tdiff logs the diff output to t.Error. +func tdiff(t *testing.T, a, b string) { + data, err := diff([]byte(a), []byte(b)) + if err != nil { + t.Error(err) + return + } + t.Error(string(data)) +} + +// Compare expected and actual values, failing and outputting a diff of the two values if they are not deeply equal +func compare(t *testing.T, actual, expected interface{}) { + if !reflect.DeepEqual(expected, actual) { + tdiff(t, toJSON(expected), toJSON(actual)) + } +} + +// Small tests checking that the parser returns exactly the right syntax tree. +// If out is nil, we only check that the parser accepts the file. 
+var parseTests = []struct { + in string + out *File +}{ + { + in: `go_binary(name = "x" +) +`, + out: &File{ + Path: "test", + Stmt: []Expr{ + &CallExpr{ + X: &LiteralExpr{ + Start: Position{1, 1, 0}, + Token: "go_binary", + }, + ListStart: Position{1, 10, 9}, + List: []Expr{ + &BinaryExpr{ + X: &LiteralExpr{ + Start: Position{1, 11, 10}, + Token: "name", + }, + OpStart: Position{1, 16, 15}, + Op: "=", + Y: &StringExpr{ + Start: Position{1, 18, 17}, + Value: "x", + End: Position{1, 21, 20}, + Token: `"x"`, + }, + }, + }, + End: End{Pos: Position{2, 1, 21}}, + ForceMultiLine: true, + }, + }, + }, + }, + { + in: `foo.bar.baz(name = "x")`, + out: &File{ + Path: "test", + Stmt: []Expr{ + &CallExpr{ + X: &DotExpr{ + X: &DotExpr{ + X: &LiteralExpr{ + Start: Position{1, 1, 0}, + Token: "foo", + }, + Dot: Position{1, 4, 3}, + NamePos: Position{1, 5, 4}, + Name: "bar", + }, + Dot: Position{1, 8, 7}, + NamePos: Position{1, 9, 8}, + Name: "baz", + }, + ListStart: Position{1, 12, 11}, + List: []Expr{ + &BinaryExpr{ + X: &LiteralExpr{ + Start: Position{1, 13, 12}, + Token: "name", + }, + OpStart: Position{1, 18, 17}, + Op: "=", + Y: &StringExpr{ + Start: Position{1, 20, 19}, + Value: "x", + End: Position{1, 23, 22}, + Token: `"x"`, + }, + }, + }, + End: End{Pos: Position{1, 23, 22}}, + }, + }, + }, + }, + { + in: `package(default_visibility = ["//visibility:legacy_public"]) +`, + }, + { + in: `__unused__ = [ foo_binary( + name = "signed_release_%sdpi" % dpi, + srcs = [ + ":aps_release_%s" % dpi, # all of Maps, obfuscated, w/o NLP + ":qlp_release_%s" % dpi, # the NLP + ":check_binmode_release", + ":check_remote_strings_release", + ], + debug_key = "//foo:bar.baz", + resources = ":R_src_release_%sdpi" % dpi) + for dpi in dpis ] +`, + }, +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/print.go b/vendor/github.com/bazelbuild/buildtools/build/print.go new file mode 100644 index 00000000000..94e754aaf51 --- /dev/null +++ 
b/vendor/github.com/bazelbuild/buildtools/build/print.go @@ -0,0 +1,662 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Printing of syntax trees. + +package build + +import ( + "bytes" + "fmt" + "strings" +) + +const nestedIndentation = 2 // Indentation of nested blocks +const listIndentation = 4 // Indentation of multiline expressions + +// Format returns the formatted form of the given BUILD file. +func Format(f *File) []byte { + pr := &printer{} + pr.file(f) + return pr.Bytes() +} + +// FormatString returns the string form of the given expression. +func FormatString(x Expr) string { + pr := &printer{} + switch x := x.(type) { + case *File: + pr.file(x) + default: + pr.expr(x, precLow) + } + return pr.String() +} + +// A printer collects the state during printing of a file or expression. +type printer struct { + bytes.Buffer // output buffer + comment []Comment // pending end-of-line comments + margin int // left margin (indent), a number of spaces + depth int // nesting depth inside ( ) [ ] { } +} + +// printf prints to the buffer. +func (p *printer) printf(format string, args ...interface{}) { + fmt.Fprintf(p, format, args...) +} + +// indent returns the position on the current line, in bytes, 0-indexed. +func (p *printer) indent() int { + b := p.Bytes() + n := 0 + for n < len(b) && b[len(b)-1-n] != '\n' { + n++ + } + return n +} + +// newline ends the current line, flushing end-of-line comments. 
+// It must only be called when printing a newline is known to be safe: +// when not inside an expression or when p.depth > 0. +// To break a line inside an expression that might not be enclosed +// in brackets of some kind, use breakline instead. +func (p *printer) newline() { + if len(p.comment) > 0 { + p.printf(" ") + for i, com := range p.comment { + if i > 0 { + p.trim() + p.printf("\n%*s", p.margin, "") + } + p.printf("%s", strings.TrimSpace(com.Token)) + } + p.comment = p.comment[:0] + } + + p.trim() + p.printf("\n%*s", p.margin, "") +} + +// breakline breaks the current line, inserting a continuation \ if needed. +// If no continuation \ is needed, breakline flushes end-of-line comments. +func (p *printer) breakline() { + if p.depth == 0 { + // Cannot have both final \ and comments. + p.printf(" \\\n%*s", p.margin, "") + return + } + + // Safe to use newline. + p.newline() +} + +// trim removes trailing spaces from the current line. +func (p *printer) trim() { + // Remove trailing space from line we're about to end. + b := p.Bytes() + n := len(b) + for n > 0 && b[n-1] == ' ' { + n-- + } + p.Truncate(n) +} + +// file formats the given file into the print buffer. +func (p *printer) file(f *File) { + for _, com := range f.Before { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + + p.statements(f.Stmt) + + for _, com := range f.After { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + + // If the last expression is in an indented code block there can be spaces in the last line. 
+ p.trim() +} + +func (p *printer) statements(stmts []Expr) { + for i, stmt := range stmts { + switch stmt := stmt.(type) { + case *CommentBlock: + // comments already handled + + case *PythonBlock: + for _, com := range stmt.Before { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + p.printf("%s", stmt.Token) // includes trailing newline + + default: + p.expr(stmt, precLow) + + // Print an empty line break after the expression unless it's a code block. + // For a code block, the line break is generated by its last statement. + if !isCodeBlock(stmt) { + p.newline() + } + } + + for _, com := range stmt.Comment().After { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + + if i+1 < len(stmts) && !compactStmt(stmt, stmts[i+1], p.margin == 0) { + p.newline() + } + } +} + +// compactStmt reports whether the pair of statements s1, s2 +// should be printed without an intervening blank line. +// We omit the blank line when both are subinclude statements +// and the second one has no leading comments. +func compactStmt(s1, s2 Expr, isTopLevel bool) bool { + if len(s2.Comment().Before) > 0 { + return false + } + + if isTopLevel { + return isCall(s1, "load") && isCall(s2, "load") + } else { + return !(isCodeBlock(s1) || isCodeBlock(s2)) + } +} + +// isCall reports whether x is a call to a function with the given name. +func isCall(x Expr, name string) bool { + c, ok := x.(*CallExpr) + if !ok { + return false + } + nam, ok := c.X.(*LiteralExpr) + if !ok { + return false + } + return nam.Token == name +} + +// isCodeBlock checks if the statement is a code block (def, if, for, etc.) +func isCodeBlock(x Expr) bool { + switch x.(type) { + case *FuncDef: + return true + default: + return false + } +} + +// Expression formatting. + +// The expression formatter must introduce parentheses to force the +// meaning described by the parse tree. We preserve parentheses in the +// input, so extra parentheses are only needed if we have edited the tree. 
+// +// For example consider these expressions: +// (1) "x" "y" % foo +// (2) "x" + "y" % foo +// (3) "x" + ("y" % foo) +// (4) ("x" + "y") % foo +// When we parse (1), we represent the concatenation as an addition. +// However, if we print the addition back out without additional parens, +// as in (2), it has the same meaning as (3), which is not the original +// meaning. To preserve the original meaning we must add parens as in (4). +// +// To allow arbitrary rewrites to be formatted properly, we track full +// operator precedence while printing instead of just handling this one +// case of string concatenation. +// +// The precedences are assigned values low to high. A larger number +// binds tighter than a smaller number. All binary operators bind +// left-to-right. +const ( + precLow = iota + precAssign + precComma + precColon + precIn + precOr + precAnd + precCmp + precAdd + precMultiply + precSuffix + precUnary + precConcat +) + +// opPrec gives the precedence for operators found in a BinaryExpr. +var opPrec = map[string]int{ + "=": precAssign, + "+=": precAssign, + "or": precOr, + "and": precAnd, + "<": precCmp, + ">": precCmp, + "==": precCmp, + "!=": precCmp, + "<=": precCmp, + ">=": precCmp, + "+": precAdd, + "-": precAdd, + "*": precMultiply, + "/": precMultiply, + "%": precMultiply, +} + +// expr prints the expression v to the print buffer. +// The value outerPrec gives the precedence of the operator +// outside expr. If that operator binds tighter than v's operator, +// expr must introduce parentheses to preserve the meaning +// of the parse tree (see above). +func (p *printer) expr(v Expr, outerPrec int) { + // Emit line-comments preceding this expression. + // If we are in the middle of an expression but not inside ( ) [ ] { } + // then we cannot just break the line: we'd have to end it with a \. + // However, even then we can't emit line comments since that would + // end the expression. 
This is only a concern if we have rewritten + // the parse tree. If comments were okay before this expression in + // the original input they're still okay now, in the absense of rewrites. + // + // TODO(bazel-team): Check whether it is valid to emit comments right now, + // and if not, insert them earlier in the output instead, at the most + // recent \n not following a \ line. + if before := v.Comment().Before; len(before) > 0 { + // Want to print a line comment. + // Line comments must be at the current margin. + p.trim() + if p.indent() > 0 { + // There's other text on the line. Start a new line. + p.printf("\n") + } + // Re-indent to margin. + p.printf("%*s", p.margin, "") + for _, com := range before { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + } + + // Do we introduce parentheses? + // The result depends on the kind of expression. + // Each expression type that might need parentheses + // calls addParen with its own precedence. + // If parentheses are necessary, addParen prints the + // opening parenthesis and sets parenthesized so that + // the code after the switch can print the closing one. + parenthesized := false + addParen := func(prec int) { + if prec < outerPrec { + p.printf("(") + p.depth++ + parenthesized = true + } + } + + switch v := v.(type) { + default: + panic(fmt.Errorf("printer: unexpected type %T", v)) + + case *LiteralExpr: + p.printf("%s", v.Token) + + case *StringExpr: + // If the Token is a correct quoting of Value, use it. + // This preserves the specific escaping choices that + // BUILD authors have made, and it also works around + // b/7272572. 
+ if strings.HasPrefix(v.Token, `"`) { + s, triple, err := unquote(v.Token) + if s == v.Value && triple == v.TripleQuote && err == nil { + p.printf("%s", v.Token) + break + } + } + + p.printf("%s", quote(v.Value, v.TripleQuote)) + + case *DotExpr: + addParen(precSuffix) + p.expr(v.X, precSuffix) + p.printf(".%s", v.Name) + + case *IndexExpr: + addParen(precSuffix) + p.expr(v.X, precSuffix) + p.printf("[") + p.expr(v.Y, precLow) + p.printf("]") + + case *KeyValueExpr: + p.expr(v.Key, precLow) + p.printf(": ") + p.expr(v.Value, precLow) + + case *SliceExpr: + addParen(precSuffix) + p.expr(v.X, precSuffix) + p.printf("[") + if v.From != nil { + p.expr(v.From, precLow) + } + p.printf(":") + if v.To != nil { + p.expr(v.To, precLow) + } + if v.SecondColon.Byte != 0 { + p.printf(":") + if v.Step != nil { + p.expr(v.Step, precLow) + } + } + p.printf("]") + + case *UnaryExpr: + addParen(precUnary) + if v.Op == "not" { + p.printf("not ") // Requires a space after it. + } else { + p.printf("%s", v.Op) + } + p.expr(v.X, precUnary) + + case *LambdaExpr: + addParen(precColon) + p.printf("lambda ") + for i, name := range v.Var { + if i > 0 { + p.printf(", ") + } + p.expr(name, precLow) + } + p.printf(": ") + p.expr(v.Expr, precColon) + + case *BinaryExpr: + // Precedence: use the precedence of the operator. + // Since all binary expressions format left-to-right, + // it is okay for the left side to reuse the same operator + // without parentheses, so we use prec for v.X. + // For the same reason, the right side cannot reuse the same + // operator, or else a parse tree for a + (b + c), where the ( ) are + // not present in the source, will format as a + b + c, which + // means (a + b) + c. Treat the right expression as appearing + // in a context one precedence level higher: use prec+1 for v.Y. 
+ // + // Line breaks: if we are to break the line immediately after + // the operator, introduce a margin at the current column, + // so that the second operand lines up with the first one and + // also so that neither operand can use space to the left. + // If the operator is an =, indent the right side another 4 spaces. + prec := opPrec[v.Op] + addParen(prec) + m := p.margin + if v.LineBreak { + p.margin = p.indent() + if v.Op == "=" { + p.margin += listIndentation + } + } + + p.expr(v.X, prec) + p.printf(" %s", v.Op) + if v.LineBreak { + p.breakline() + } else { + p.printf(" ") + } + p.expr(v.Y, prec+1) + p.margin = m + + case *ParenExpr: + p.seq("()", []Expr{v.X}, &v.End, modeParen, false, v.ForceMultiLine) + + case *CallExpr: + addParen(precSuffix) + p.expr(v.X, precSuffix) + p.seq("()", v.List, &v.End, modeCall, v.ForceCompact, v.ForceMultiLine) + + case *ListExpr: + p.seq("[]", v.List, &v.End, modeList, false, v.ForceMultiLine) + + case *SetExpr: + p.seq("{}", v.List, &v.End, modeList, false, v.ForceMultiLine) + + case *TupleExpr: + p.seq("()", v.List, &v.End, modeTuple, v.ForceCompact, v.ForceMultiLine) + + case *DictExpr: + var list []Expr + for _, x := range v.List { + list = append(list, x) + } + p.seq("{}", list, &v.End, modeDict, false, v.ForceMultiLine) + + case *ListForExpr: + p.listFor(v) + + case *ConditionalExpr: + addParen(precSuffix) + p.expr(v.Then, precSuffix) + p.printf(" if ") + p.expr(v.Test, precSuffix) + p.printf(" else ") + p.expr(v.Else, precSuffix) + + case *FuncDef: + p.printf("def ") + p.printf(v.Name) + p.seq("()", v.Args, &v.End, modeTuple, v.ForceCompact, v.ForceMultiLine) + p.printf(":") + p.margin += nestedIndentation + p.newline() + p.statements(v.Body.Statements) + p.margin -= nestedIndentation + } + + // Add closing parenthesis if needed. + if parenthesized { + p.depth-- + p.printf(")") + } + + // Queue end-of-line comments for printing when we + // reach the end of the line. 
+ p.comment = append(p.comment, v.Comment().Suffix...) +} + +// A seqMode describes a formatting mode for a sequence of values, +// like a list or call arguments. +type seqMode int + +const ( + _ seqMode = iota + + modeCall // f(x) + modeList // [x] + modeTuple // (x,) + modeParen // (x) + modeDict // {x:y} +) + +// seq formats a list of values inside a given bracket pair (brack = "()", "[]", "{}"). +// The end node holds any trailing comments to be printed just before the +// closing bracket. +// The mode parameter specifies the sequence mode (see above). +// If multiLine is true, seq avoids the compact form even +// for 0- and 1-element sequences. +func (p *printer) seq(brack string, list []Expr, end *End, mode seqMode, forceCompact, forceMultiLine bool) { + p.printf("%s", brack[:1]) + p.depth++ + + // If there are line comments, force multiline + // so we can print the comments before the closing bracket. + for _, x := range list { + if len(x.Comment().Before) > 0 { + forceMultiLine = true + } + } + if len(end.Before) > 0 { + forceMultiLine = true + } + + // Resolve possibly ambiguous call arguments explicitly + // instead of depending on implicit resolution in logic below. + if forceMultiLine { + forceCompact = false + } + + switch { + case len(list) == 0 && !forceMultiLine: + // Compact form: print nothing. + + case len(list) == 1 && !forceMultiLine: + // Compact form. + p.expr(list[0], precLow) + // Tuple must end with comma, to mark it as a tuple. + if mode == modeTuple { + p.printf(",") + } + + case forceCompact: + // Compact form but multiple elements. + for i, x := range list { + if i > 0 { + p.printf(", ") + } + p.expr(x, precLow) + } + + default: + // Multi-line form. + p.margin += listIndentation + for i, x := range list { + // If we are about to break the line before the first + // element and there are trailing end-of-line comments + // waiting to be printed, delay them and print them as + // whole-line comments preceding that element. 
+ // Do this by printing a newline ourselves and positioning + // so that the end-of-line comment, with the two spaces added, + // will line up with the current margin. + if i == 0 && len(p.comment) > 0 { + p.printf("\n%*s", p.margin-2, "") + } + + p.newline() + p.expr(x, precLow) + if mode != modeParen || i+1 < len(list) { + p.printf(",") + } + } + // Final comments. + for _, com := range end.Before { + p.newline() + p.printf("%s", strings.TrimSpace(com.Token)) + } + p.margin -= listIndentation + p.newline() + } + p.depth-- + p.printf("%s", brack[1:]) +} + +// listFor formats a ListForExpr (list comprehension). +// The single-line form is: +// [x for y in z if c] +// +// and the multi-line form is: +// [ +// x +// for y in z +// if c +// ] +// +func (p *printer) listFor(v *ListForExpr) { + multiLine := v.ForceMultiLine || len(v.End.Before) > 0 + + // space breaks the line in multiline mode + // or else prints a space. + space := func() { + if multiLine { + p.breakline() + } else { + p.printf(" ") + } + } + + if v.Brack != "" { + p.depth++ + p.printf("%s", v.Brack[:1]) + } + + if multiLine { + if v.Brack != "" { + p.margin += listIndentation + } + p.newline() + } + + p.expr(v.X, precLow) + + for _, c := range v.For { + space() + p.printf("for ") + for i, name := range c.For.Var { + if i > 0 { + p.printf(", ") + } + p.expr(name, precLow) + } + p.printf(" in ") + p.expr(c.For.Expr, precLow) + p.comment = append(p.comment, c.For.Comment().Suffix...) + + for _, i := range c.Ifs { + space() + p.printf("if ") + p.expr(i.Cond, precLow) + p.comment = append(p.comment, i.Comment().Suffix...) + } + p.comment = append(p.comment, c.Comment().Suffix...) 
+ + } + + if multiLine { + for _, com := range v.End.Before { + p.newline() + p.printf("%s", strings.TrimSpace(com.Token)) + } + if v.Brack != "" { + p.margin -= listIndentation + } + p.newline() + } + + if v.Brack != "" { + p.printf("%s", v.Brack[1:]) + p.depth-- + } +} + +func (p *printer) isTopLevel() bool { + return p.margin == 0 +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/print_test.go b/vendor/github.com/bazelbuild/buildtools/build/print_test.go new file mode 100644 index 00000000000..6143358daea --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/print_test.go @@ -0,0 +1,311 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package build + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "reflect" + "strings" + "testing" + + "github.com/bazelbuild/buildtools/tables" +) + +// exists reports whether the named file exists. +func exists(name string) bool { + _, err := os.Stat(name) + return err == nil +} + +// Test that reading and then writing the golden files +// does not change their output. 
+func TestPrintGolden(t *testing.T) { + outs, chdir := findTests(t, ".golden") + defer chdir() + for _, out := range outs { + if strings.Contains(out, ".stripslashes.") { + tables.StripLabelLeadingSlashes = true + } + if strings.Contains(out, ".formatbzl.") { + tables.FormatBzlFiles = true + } + // Test file 050 tests the ShortenAbsoluteLabelsToRelative behavior, all other tests assume that ShortenAbsoluteLabelsToRelative is false. + if strings.Contains(out, "/050.") { + tables.ShortenAbsoluteLabelsToRelative = true + } + testPrint(t, out, out, false) + tables.StripLabelLeadingSlashes = false + tables.ShortenAbsoluteLabelsToRelative = false + tables.FormatBzlFiles = false + } +} + +// Test that formatting the input files produces the golden files. +func TestPrintRewrite(t *testing.T) { + ins, chdir := findTests(t, ".in") + defer chdir() + for _, in := range ins { + prefix := in[:len(in)-len(".in")] + out := prefix + ".golden" + + // Test file 050 tests the ShortenAbsoluteLabelsToRelative behavior, all other tests assume that ShortenAbsoluteLabelsToRelative is false. + if strings.Contains(out, "/050.") { + tables.ShortenAbsoluteLabelsToRelative = true + } + + testPrint(t, in, out, true) + strippedOut := prefix + ".stripslashes.golden" + if exists(strippedOut) { + tables.StripLabelLeadingSlashes = true + testPrint(t, in, strippedOut, true) + tables.StripLabelLeadingSlashes = false + } + + bzl := prefix + ".formatbzl.golden" + if exists(bzl) { + tables.FormatBzlFiles = true + testPrint(t, in, bzl, true) + tables.FormatBzlFiles = false + } + + tables.ShortenAbsoluteLabelsToRelative = false + } +} + +// findTests finds all files of the passed suffix in the build/testdata directory. +// It changes the working directory to be the directory containing the `testdata` directory, +// and returns a function to call to change back to the current directory. +// This allows tests to assert on alias finding between absolute and relative labels. 
+func findTests(t *testing.T, suffix string) ([]string, func()) { + wd, err := os.Getwd() + if err != nil { + t.Fatal(err) + } + if err := os.Chdir(filepath.Join(os.Getenv("TEST_SRCDIR"), os.Getenv("TEST_WORKSPACE"), "build")); err != nil { + t.Fatal(err) + } + outs, err := filepath.Glob("testdata/*" + suffix) + if err != nil { + t.Fatal(err) + } + if len(outs) == 0 { + t.Fatal("Didn't find any test cases") + } + return outs, func() { os.Chdir(wd) } +} + +// testPrint is a helper for testing the printer. +// It reads the file named in, reformats it, and compares +// the result to the file named out. If rewrite is true, the +// reformatting includes buildifier's higher-level rewrites. +func testPrint(t *testing.T, in, out string, rewrite bool) { + data, err := ioutil.ReadFile(in) + if err != nil { + t.Error(err) + return + } + + golden, err := ioutil.ReadFile(out) + if err != nil { + t.Error(err) + return + } + + base := "testdata/" + filepath.Base(in) + bld, err := Parse(base, data) + if err != nil { + t.Error(err) + return + } + + if rewrite { + Rewrite(bld, nil) + } + + ndata := Format(bld) + + if !bytes.Equal(ndata, golden) { + t.Errorf("formatted %s incorrectly: diff shows -%s, +ours", base, filepath.Base(out)) + tdiff(t, string(golden), string(ndata)) + return + } +} + +// Test that when files in the testdata directory are parsed +// and printed and parsed again, we get the same parse tree +// both times. 
+func TestPrintParse(t *testing.T) { + outs, chdir := findTests(t, "") + defer chdir() + for _, out := range outs { + data, err := ioutil.ReadFile(out) + if err != nil { + t.Error(err) + continue + } + + base := "testdata/" + filepath.Base(out) + f, err := Parse(base, data) + if err != nil { + t.Errorf("parsing original: %v", err) + } + + ndata := Format(f) + + f2, err := Parse(base, ndata) + if err != nil { + t.Errorf("parsing reformatted: %v", err) + } + + eq := eqchecker{file: base} + if err := eq.check(f, f2); err != nil { + t.Errorf("not equal: %v", err) + } + } +} + +// An eqchecker holds state for checking the equality of two parse trees. +type eqchecker struct { + file string + pos Position +} + +// errorf returns an error described by the printf-style format and arguments, +// inserting the current file position before the error text. +func (eq *eqchecker) errorf(format string, args ...interface{}) error { + return fmt.Errorf("%s:%d: %s", eq.file, eq.pos.Line, + fmt.Sprintf(format, args...)) +} + +// check checks that v and w represent the same parse tree. +// If not, it returns an error describing the first difference. +func (eq *eqchecker) check(v, w interface{}) error { + return eq.checkValue(reflect.ValueOf(v), reflect.ValueOf(w)) +} + +var ( + posType = reflect.TypeOf(Position{}) + commentsType = reflect.TypeOf(Comments{}) + parenType = reflect.TypeOf((*ParenExpr)(nil)) + stringExprType = reflect.TypeOf(StringExpr{}) +) + +// checkValue checks that v and w represent the same parse tree. +// If not, it returns an error describing the first difference. +func (eq *eqchecker) checkValue(v, w reflect.Value) error { + // inner returns the innermost expression for v. + // If v is a parenthesized expression (X) it returns x. + // if v is a non-nil interface value, it returns the concrete + // value in the interface. 
+ inner := func(v reflect.Value) reflect.Value { + for v.IsValid() { + if v.Type() == parenType { + v = v.Elem().FieldByName("X") + continue + } + if v.Kind() == reflect.Interface && !v.IsNil() { + v = v.Elem() + continue + } + break + } + return v + } + + v = inner(v) + w = inner(w) + + if v.Kind() != w.Kind() { + return eq.errorf("%s became %s", v.Kind(), w.Kind()) + } + + // There is nothing to compare for zero values, so exit early. + if !v.IsValid() { + return nil + } + + if v.Type() != w.Type() { + return eq.errorf("%s became %s", v.Type(), w.Type()) + } + + if p, ok := v.Interface().(Expr); ok { + eq.pos, _ = p.Span() + } + + switch v.Kind() { + default: + return eq.errorf("unexpected type %s", v.Type()) + + case reflect.Bool, reflect.Int, reflect.String: + vi := v.Interface() + wi := w.Interface() + if vi != wi { + return eq.errorf("%v became %v", vi, wi) + } + + case reflect.Slice: + vl := v.Len() + wl := w.Len() + for i := 0; i < vl || i < wl; i++ { + if i >= vl { + return eq.errorf("unexpected %s", w.Index(i).Type()) + } + if i >= wl { + return eq.errorf("missing %s", v.Index(i).Type()) + } + if err := eq.checkValue(v.Index(i), w.Index(i)); err != nil { + return err + } + } + + case reflect.Struct: + // Fields in struct must match. 
+ t := v.Type() + n := t.NumField() + for i := 0; i < n; i++ { + tf := t.Field(i) + switch { + default: + if err := eq.checkValue(v.Field(i), w.Field(i)); err != nil { + return err + } + + case tf.Type == posType: // ignore positions + case tf.Type == commentsType: // ignore comment assignment + case tf.Name == "MultiLine": // ignore multiline setting + case tf.Name == "LineBreak": // ignore line break setting + case t == stringExprType && tf.Name == "Token": // ignore raw string token + } + } + + case reflect.Ptr, reflect.Interface: + if v.IsNil() != w.IsNil() { + if v.IsNil() { + return eq.errorf("unexpected %s", w.Elem().Type()) + } + return eq.errorf("missing %s", v.Elem().Type()) + } + if err := eq.checkValue(v.Elem(), w.Elem()); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/quote.go b/vendor/github.com/bazelbuild/buildtools/build/quote.go new file mode 100644 index 00000000000..d5ffe8d4525 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/quote.go @@ -0,0 +1,262 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Python quoted strings. + +package build + +import ( + "bytes" + "fmt" + "strconv" + "strings" +) + +// unesc maps single-letter chars following \ to their actual values. 
// unesc maps single-letter chars following \ to their actual values.
var unesc = [256]byte{
	'a':  '\a',
	'b':  '\b',
	'f':  '\f',
	'n':  '\n',
	'r':  '\r',
	't':  '\t',
	'v':  '\v',
	'\\': '\\',
	'\'': '\'',
	'"':  '"',
}

// esc maps escape-worthy bytes to the char that should follow \.
var esc = [256]byte{
	'\a': 'a',
	'\b': 'b',
	'\f': 'f',
	'\n': 'n',
	'\r': 'r',
	'\t': 't',
	'\v': 'v',
	'\\': '\\',
	'\'': '\'',
	'"':  '"',
}

// notEsc is a list of characters that can follow a \ in a string value
// without having to escape the \. That is, since ( is in this list, we
// quote the Go string "foo\\(bar" as the Python literal "foo\(bar".
// This really does happen in BUILD files, especially in strings
// being used as shell arguments containing regular expressions.
const notEsc = " !#$%&()*+,-./:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~"

// unquote unquotes the quoted string, returning the actual
// string value, whether the original was triple-quoted, and
// an error describing invalid input.
func unquote(quoted string) (s string, triple bool, err error) {
	// Check for raw prefix: means don't interpret the inner \.
	raw := false
	if strings.HasPrefix(quoted, "r") {
		raw = true
		quoted = quoted[1:]
	}

	if len(quoted) < 2 {
		err = fmt.Errorf("string literal too short")
		return
	}

	if quoted[0] != '"' && quoted[0] != '\'' || quoted[0] != quoted[len(quoted)-1] {
		err = fmt.Errorf("string literal has invalid quotes")
		// BUG FIX: the original fell through here and decoded the malformed
		// literal anyway, returning a non-empty s alongside the error.
		// Return immediately so callers never see a partial result.
		return
	}

	// Check for triple quoted string.
	quote := quoted[0]
	if len(quoted) >= 6 && quoted[1] == quote && quoted[2] == quote && quoted[:3] == quoted[len(quoted)-3:] {
		triple = true
		quoted = quoted[3 : len(quoted)-3]
	} else {
		quoted = quoted[1 : len(quoted)-1]
	}

	// Now quoted is the quoted data, but no quotes.
	// If we're in raw mode or there are no escapes, we're done.
	if raw || !strings.Contains(quoted, `\`) {
		s = quoted
		return
	}

	// Otherwise process quoted string.
	// Each iteration processes one escape sequence along with the
	// plain text leading up to it.
	var buf bytes.Buffer
	for {
		// Remove prefix before escape sequence.
		i := strings.Index(quoted, `\`)
		if i < 0 {
			i = len(quoted)
		}
		buf.WriteString(quoted[:i])
		quoted = quoted[i:]

		if len(quoted) == 0 {
			break
		}

		// Process escape sequence.
		if len(quoted) == 1 {
			err = fmt.Errorf(`truncated escape sequence \`)
			return
		}

		switch quoted[1] {
		default:
			// In Python, if \z (for some byte z) is not a known escape sequence
			// then it appears as literal text in the string.
			buf.WriteString(quoted[:2])
			quoted = quoted[2:]

		case '\n':
			// Ignore the escape and the line break.
			quoted = quoted[2:]

		case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '\'', '"':
			// One-char escape.
			buf.WriteByte(unesc[quoted[1]])
			quoted = quoted[2:]

		case '0', '1', '2', '3', '4', '5', '6', '7':
			// Octal escape, up to 3 digits.
			n := int(quoted[1] - '0')
			quoted = quoted[2:]
			for i := 1; i < 3; i++ {
				if len(quoted) == 0 || quoted[0] < '0' || '7' < quoted[0] {
					break
				}
				n = n*8 + int(quoted[0]-'0')
				quoted = quoted[1:]
			}
			if n >= 256 {
				// NOTE: Python silently discards the high bit,
				// so that '\541' == '\141' == 'a'.
				// Let's see if we can avoid doing that in BUILD files.
				err = fmt.Errorf(`invalid escape sequence \%03o`, n)
				return
			}
			buf.WriteByte(byte(n))

		case 'x':
			// Hexadecimal escape, exactly 2 digits.
			if len(quoted) < 4 {
				err = fmt.Errorf(`truncated escape sequence %s`, quoted)
				return
			}
			n, err1 := strconv.ParseInt(quoted[2:4], 16, 0)
			if err1 != nil {
				err = fmt.Errorf(`invalid escape sequence %s`, quoted[:4])
				return
			}
			buf.WriteByte(byte(n))
			quoted = quoted[4:]
		}
	}

	s = buf.String()
	return
}

// indexByte returns the index of the first instance of b in s, or else -1.
+func indexByte(s string, b byte) int { + for i := 0; i < len(s); i++ { + if s[i] == b { + return i + } + } + return -1 +} + +// hex is a list of the hexadecimal digits, for use in quoting. +// We always print lower-case hexadecimal. +const hex = "0123456789abcdef" + +// quote returns the quoted form of the string value "x". +// If triple is true, quote uses the triple-quoted form """x""". +func quote(unquoted string, triple bool) string { + q := `"` + if triple { + q = `"""` + } + + var buf bytes.Buffer + buf.WriteString(q) + + for i := 0; i < len(unquoted); i++ { + c := unquoted[i] + if c == '"' && triple && (i+1 < len(unquoted) && unquoted[i+1] != '"' || i+2 < len(unquoted) && unquoted[i+2] != '"') { + // Can pass up to two quotes through, because they are followed by a non-quote byte. + buf.WriteByte(c) + if i+1 < len(unquoted) && unquoted[i+1] == '"' { + buf.WriteByte(c) + i++ + } + continue + } + if triple && c == '\n' { + // Can allow newline in triple-quoted string. + buf.WriteByte(c) + continue + } + if c == '\'' { + // Can allow ' since we always use ". + buf.WriteByte(c) + continue + } + if c == '\\' { + if i+1 < len(unquoted) && indexByte(notEsc, unquoted[i+1]) >= 0 { + // Can pass \ through when followed by a byte that + // known not to be a valid escape sequence and also + // that does not trigger an escape sequence of its own. + // Use this, because various BUILD files do. + buf.WriteByte('\\') + buf.WriteByte(unquoted[i+1]) + i++ + continue + } + } + if esc[c] != 0 { + buf.WriteByte('\\') + buf.WriteByte(esc[c]) + continue + } + if c < 0x20 || c >= 0x80 { + // BUILD files are supposed to be Latin-1, so escape all control and high bytes. + // I'd prefer to use \x here, but Blaze does not implement + // \x in quoted strings (b/7272572). + buf.WriteByte('\\') + buf.WriteByte(hex[c>>6]) // actually octal but reusing hex digits 0-7. 
+ buf.WriteByte(hex[(c>>3)&7]) + buf.WriteByte(hex[c&7]) + /* + buf.WriteByte('\\') + buf.WriteByte('x') + buf.WriteByte(hex[c>>4]) + buf.WriteByte(hex[c&0xF]) + */ + continue + } + buf.WriteByte(c) + continue + } + + buf.WriteString(q) + return buf.String() +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/quote_test.go b/vendor/github.com/bazelbuild/buildtools/build/quote_test.go new file mode 100644 index 00000000000..1b52262604e --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/quote_test.go @@ -0,0 +1,83 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package build + +import ( + "strings" + "testing" +) + +var quoteTests = []struct { + q string // quoted + s string // unquoted (actual string) + std bool // q is standard form for s +}{ + {`""`, "", true}, + {`''`, "", false}, + {`"hello"`, `hello`, true}, + {`'hello'`, `hello`, false}, + {`"quote\"here"`, `quote"here`, true}, + {`'quote\"here'`, `quote"here`, false}, + {`'quote"here'`, `quote"here`, false}, + {`"quote'here"`, `quote'here`, true}, + {`"quote\'here"`, `quote'here`, false}, + {`'quote\'here'`, `quote'here`, false}, + {`"""hello " ' world "" asdf ''' foo"""`, `hello " ' world "" asdf ''' foo`, true}, + {`"foo\(bar"`, `foo\(bar`, true}, + {`"""hello +world"""`, "hello\nworld", true}, + + {`"\a\b\f\n\r\t\v\000\377"`, "\a\b\f\n\r\t\v\000\xFF", true}, + {`"\a\b\f\n\r\t\v\x00\xff"`, "\a\b\f\n\r\t\v\000\xFF", false}, + {`"\a\b\f\n\r\t\v\000\xFF"`, "\a\b\f\n\r\t\v\000\xFF", false}, + {`"\a\b\f\n\r\t\v\000\377\"'\\\003\200"`, "\a\b\f\n\r\t\v\x00\xFF\"'\\\x03\x80", true}, + {`"\a\b\f\n\r\t\v\x00\xff\"'\\\x03\x80"`, "\a\b\f\n\r\t\v\x00\xFF\"'\\\x03\x80", false}, + {`"\a\b\f\n\r\t\v\000\xFF\"'\\\x03\x80"`, "\a\b\f\n\r\t\v\x00\xFF\"'\\\x03\x80", false}, + {`"\a\b\f\n\r\t\v\000\xFF\"\'\\\x03\x80"`, "\a\b\f\n\r\t\v\x00\xFF\"'\\\x03\x80", false}, + { + `"cat $(SRCS) | grep '\s*ip_block:' | sed -e 's/\s*ip_block: \"\([^ ]*\)\"/ \x27\\1\x27,/g' >> $@; "`, + "cat $(SRCS) | grep '\\s*ip_block:' | sed -e 's/\\s*ip_block: \"\\([^ ]*\\)\"/ '\\1',/g' >> $@; ", + false, + }, + { + `"cat $(SRCS) | grep '\\s*ip_block:' | sed -e 's/\\s*ip_block: \"\([^ ]*\)\"/ '\\1',/g' >> $@; "`, + "cat $(SRCS) | grep '\\s*ip_block:' | sed -e 's/\\s*ip_block: \"\\([^ ]*\\)\"/ '\\1',/g' >> $@; ", + true, + }, +} + +func TestQuote(t *testing.T) { + for _, tt := range quoteTests { + if !tt.std { + continue + } + q := quote(tt.s, strings.HasPrefix(tt.q, `"""`)) + if q != tt.q { + t.Errorf("quote(%#q) = %s, want %s", tt.s, q, tt.q) + } + } +} + +func TestUnquote(t *testing.T) { + for _, tt 
:= range quoteTests { + s, triple, err := unquote(tt.q) + wantTriple := strings.HasPrefix(tt.q, `"""`) || strings.HasPrefix(tt.q, `'''`) + if s != tt.s || triple != wantTriple || err != nil { + t.Errorf("unquote(%s) = %#q, %v, %v want %#q, %v, nil", tt.q, s, triple, err, tt.s, wantTriple) + } + } +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/rewrite.go b/vendor/github.com/bazelbuild/buildtools/build/rewrite.go new file mode 100644 index 00000000000..b7f331a3e06 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/rewrite.go @@ -0,0 +1,817 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Rewriting of high-level (not purely syntactic) BUILD constructs. + +package build + +import ( + "path" + "regexp" + "sort" + "strings" + + "github.com/bazelbuild/buildtools/tables" +) + +// For debugging: flag to disable certain rewrites. +var DisableRewrites []string + +// disabled reports whether the named rewrite is disabled. +func disabled(name string) bool { + for _, x := range DisableRewrites { + if name == x { + return true + } + } + return false +} + +// For debugging: allow sorting of these lists even with sorting otherwise disabled. +var AllowSort []string + +// allowedSort reports whether sorting is allowed in the named context. 
+func allowedSort(name string) bool { + for _, x := range AllowSort { + if name == x { + return true + } + } + return false +} + +// Rewrite applies the high-level Buildifier rewrites to f, modifying it in place. +// If info is non-nil, Rewrite updates it with information about the rewrite. +func Rewrite(f *File, info *RewriteInfo) { + // Allocate an info so that helpers can assume it's there. + if info == nil { + info = new(RewriteInfo) + } + + for _, r := range rewrites { + if !disabled(r.name) { + r.fn(f, info) + } + } +} + +// RewriteInfo collects information about what Rewrite did. +type RewriteInfo struct { + EditLabel int // number of label strings edited + NameCall int // number of calls with argument names added + SortCall int // number of call argument lists sorted + SortStringList int // number of string lists sorted + UnsafeSort int // number of unsafe string lists sorted + Log []string // log entries - may change +} + +func (info *RewriteInfo) String() string { + s := "" + if info.EditLabel > 0 { + s += " label" + } + if info.NameCall > 0 { + s += " callname" + } + if info.SortCall > 0 { + s += " callsort" + } + if info.SortStringList > 0 { + s += " listsort" + } + if info.UnsafeSort > 0 { + s += " unsafesort" + } + if s != "" { + s = s[1:] + } + return s +} + +// rewrites is the list of all Buildifier rewrites, in the order in which they are applied. +// The order here matters: for example, label canonicalization must happen +// before sorting lists of strings. +var rewrites = []struct { + name string + fn func(*File, *RewriteInfo) +}{ + {"callsort", sortCallArgs}, + {"label", fixLabels}, + {"listsort", sortStringLists}, + {"multiplus", fixMultilinePlus}, +} + +// leaveAlone reports whether any of the nodes on the stack are marked +// with a comment containing "buildifier: leave-alone". 
+func leaveAlone(stk []Expr, final Expr) bool { + for _, x := range stk { + if leaveAlone1(x) { + return true + } + } + if final != nil && leaveAlone1(final) { + return true + } + return false +} + +// hasComment reports whether x is marked with a comment that +// after being converted to lower case, contains the specified text. +func hasComment(x Expr, text string) bool { + for _, com := range x.Comment().Before { + if strings.Contains(strings.ToLower(com.Token), text) { + return true + } + } + return false +} + +// leaveAlone1 reports whether x is marked with a comment containing +// "buildifier: leave-alone", case-insensitive. +func leaveAlone1(x Expr) bool { + return hasComment(x, "buildifier: leave-alone") +} + +// doNotSort reports whether x is marked with a comment containing +// "do not sort", case-insensitive. +func doNotSort(x Expr) bool { + return hasComment(x, "do not sort") +} + +// keepSorted reports whether x is marked with a comment containing +// "keep sorted", case-insensitive. +func keepSorted(x Expr) bool { + return hasComment(x, "keep sorted") +} + +// fixLabels rewrites labels into a canonical form. +// +// First, it joins labels written as string addition, turning +// "//x" + ":y" (usually split across multiple lines) into "//x:y". +// +// Second, it removes redundant target qualifiers, turning labels like +// "//third_party/m4:m4" into "//third_party/m4" as well as ones like +// "@foo//:foo" into "@foo". +// +func fixLabels(f *File, info *RewriteInfo) { + joinLabel := func(p *Expr) { + add, ok := (*p).(*BinaryExpr) + if !ok || add.Op != "+" { + return + } + str1, ok := add.X.(*StringExpr) + if !ok || !strings.HasPrefix(str1.Value, "//") || strings.Contains(str1.Value, " ") { + return + } + str2, ok := add.Y.(*StringExpr) + if !ok || strings.Contains(str2.Value, " ") { + return + } + info.EditLabel++ + str1.Value += str2.Value + + // Deleting nodes add and str2. + // Merge comments from add, str1, and str2 and save in str1. 
+ com1 := add.Comment() + com2 := str1.Comment() + com3 := str2.Comment() + com1.Before = append(com1.Before, com2.Before...) + com1.Before = append(com1.Before, com3.Before...) + com1.Suffix = append(com1.Suffix, com2.Suffix...) + com1.Suffix = append(com1.Suffix, com3.Suffix...) + *str1.Comment() = *com1 + + *p = str1 + } + + labelPrefix := "//" + if tables.StripLabelLeadingSlashes { + labelPrefix = "" + } + // labelRE matches label strings, e.g. @r//x/y/z:abc + // where $1 is @r//x/y/z, $2 is @r//, $3 is r, $4 is z, $5 is abc. + labelRE := regexp.MustCompile(`^(((?:@(\w+))?//|` + labelPrefix + `)(?:.+/)?([^:]*))(?::([^:]+))?$`) + + shortenLabel := func(v Expr) { + str, ok := v.(*StringExpr) + if !ok { + return + } + editPerformed := false + + if tables.StripLabelLeadingSlashes && strings.HasPrefix(str.Value, "//") { + if path.Dir(f.Path) == "." || !strings.HasPrefix(str.Value, "//:") { + editPerformed = true + str.Value = str.Value[2:] + } + } + + if tables.ShortenAbsoluteLabelsToRelative { + thisPackage := labelPrefix + path.Dir(f.Path) + if str.Value == thisPackage { + editPerformed = true + str.Value = ":" + path.Base(str.Value) + } else if strings.HasPrefix(str.Value, thisPackage+":") { + editPerformed = true + str.Value = str.Value[len(thisPackage):] + } + } + + m := labelRE.FindStringSubmatch(str.Value) + if m == nil { + return + } + if m[4] != "" && m[4] == m[5] { // e.g. //foo:foo + editPerformed = true + str.Value = m[1] + } else if m[3] != "" && m[4] == "" && m[3] == m[5] { // e.g. 
@foo//:foo + editPerformed = true + str.Value = "@" + m[3] + } + if editPerformed { + info.EditLabel++ + } + } + + Walk(f, func(v Expr, stk []Expr) { + switch v := v.(type) { + case *CallExpr: + if leaveAlone(stk, v) { + return + } + for i := range v.List { + if leaveAlone1(v.List[i]) { + continue + } + as, ok := v.List[i].(*BinaryExpr) + if !ok || as.Op != "=" { + continue + } + key, ok := as.X.(*LiteralExpr) + if !ok || !tables.IsLabelArg[key.Token] || tables.LabelBlacklist[callName(v)+"."+key.Token] { + continue + } + if leaveAlone1(as.Y) { + continue + } + if list, ok := as.Y.(*ListExpr); ok { + for i := range list.List { + if leaveAlone1(list.List[i]) { + continue + } + joinLabel(&list.List[i]) + shortenLabel(list.List[i]) + } + } + if set, ok := as.Y.(*SetExpr); ok { + for i := range set.List { + if leaveAlone1(set.List[i]) { + continue + } + joinLabel(&set.List[i]) + shortenLabel(set.List[i]) + } + } else { + joinLabel(&as.Y) + shortenLabel(as.Y) + } + } + } + }) +} + +// callName returns the name of the rule being called by call. +// If the call is not to a literal rule name, callName returns "". +func callName(call *CallExpr) string { + rule, ok := call.X.(*LiteralExpr) + if !ok { + return "" + } + return rule.Token +} + +// sortCallArgs sorts lists of named arguments to a call. +func sortCallArgs(f *File, info *RewriteInfo) { + Walk(f, func(v Expr, stk []Expr) { + call, ok := v.(*CallExpr) + if !ok { + return + } + if leaveAlone(stk, call) { + return + } + rule := callName(call) + if rule == "" { + return + } + + // Find the tail of the argument list with named arguments. + start := len(call.List) + for start > 0 && argName(call.List[start-1]) != "" { + start-- + } + + // Record information about each arg into a sortable list. + var args namedArgs + for i, x := range call.List[start:] { + name := argName(x) + args = append(args, namedArg{ruleNamePriority(rule, name), name, i, x}) + } + + // Sort the list and put the args back in the new order. 
+ if sort.IsSorted(args) { + return + } + info.SortCall++ + sort.Sort(args) + for i, x := range args { + call.List[start+i] = x.expr + } + }) +} + +// ruleNamePriority maps a rule argument name to its sorting priority. +// It could use the auto-generated per-rule tables but for now it just +// falls back to the original list. +func ruleNamePriority(rule, arg string) int { + ruleArg := rule + "." + arg + if val, ok := tables.NamePriority[ruleArg]; ok { + return val + } + return tables.NamePriority[arg] + /* + list := ruleArgOrder[rule] + if len(list) == 0 { + return tables.NamePriority[arg] + } + for i, x := range list { + if x == arg { + return i + } + } + return len(list) + */ +} + +// If x is of the form key=value, argName returns the string key. +// Otherwise argName returns "". +func argName(x Expr) string { + if as, ok := x.(*BinaryExpr); ok && as.Op == "=" { + if id, ok := as.X.(*LiteralExpr); ok { + return id.Token + } + } + return "" +} + +// A namedArg records information needed for sorting +// a named call argument into its proper position. +type namedArg struct { + priority int // kind of name; first sort key + name string // name; second sort key + index int // original index; final sort key + expr Expr // name=value argument +} + +// namedArgs is a slice of namedArg that implements sort.Interface +type namedArgs []namedArg + +func (x namedArgs) Len() int { return len(x) } +func (x namedArgs) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x namedArgs) Less(i, j int) bool { + p := x[i] + q := x[j] + if p.priority != q.priority { + return p.priority < q.priority + } + if p.name != q.name { + return p.name < q.name + } + return p.index < q.index +} + +// sortStringLists sorts lists of string literals used as specific rule arguments. 
// sortStringLists walks the file and sorts every string list that is a
// rule argument known (via tables) to be safely sortable, plus any list
// explicitly marked with a "keep sorted" comment.
func sortStringLists(f *File, info *RewriteInfo) {
	Walk(f, func(v Expr, stk []Expr) {
		switch v := v.(type) {
		case *CallExpr:
			if leaveAlone(stk, v) {
				return
			}
			rule := callName(v)
			for _, arg := range v.List {
				if leaveAlone1(arg) {
					continue
				}
				// Only consider named arguments of the form key = value.
				as, ok := arg.(*BinaryExpr)
				if !ok || as.Op != "=" || leaveAlone1(as) || doNotSort(as) {
					continue
				}
				key, ok := as.X.(*LiteralExpr)
				if !ok {
					continue
				}
				// context is "rule.arg"; the tables decide per-context safety.
				context := rule + "." + key.Token
				if !tables.IsSortableListArg[key.Token] || tables.SortableBlacklist[context] {
					continue
				}
				if disabled("unsafesort") && !tables.SortableWhitelist[context] && !allowedSort(context) {
					continue
				}
				sortStringList(as.Y, info, context)
			}
		case *BinaryExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" comment on x = list forces sorting of list.
			as := v
			if as.Op == "=" && keepSorted(as) {
				sortStringList(as.Y, info, "?")
			}
		case *KeyValueExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" before key: list also forces sorting of list.
			if keepSorted(v) {
				sortStringList(v.Value, info, "?")
			}
		case *ListExpr:
			if disabled("unsafesort") {
				return
			}
			// "keep sorted" comment above first list element also forces sorting of list.
			if len(v.List) > 0 && keepSorted(v.List[0]) {
				sortStringList(v, info, "?")
			}
		}
	})
}

// SortStringList sorts x, a list of strings.
func SortStringList(x Expr) {
	sortStringList(x, nil, "")
}

// sortStringList sorts x, a list of strings.
// The list is broken by non-strings and by blank lines and comments into chunks.
// Each chunk is sorted in place.
func sortStringList(x Expr, info *RewriteInfo, context string) {
	list, ok := x.(*ListExpr)
	if !ok || len(list.List) < 2 || doNotSort(list.List[0]) {
		return
	}

	forceSort := keepSorted(list.List[0])

	// TODO(bazel-team): Decide how to recognize lists that cannot
	// be sorted. Avoiding all lists with comments avoids sorting
	// lists that say explicitly, in some form or another, why they
	// cannot be sorted. For example, many cc_test rules require
	// certain order in their deps attributes.
	if !forceSort {
		if line, _ := hasComments(list); line {
			return
		}
	}

	// Sort chunks of the list with no intervening blank lines or comments.
	for i := 0; i < len(list.List); {
		// Skip over non-string elements; they terminate any chunk.
		if _, ok := list.List[i].(*StringExpr); !ok {
			i++
			continue
		}

		// Extend the chunk [i, j) while elements are strings with no
		// preceding comments (a comment starts a new chunk).
		j := i + 1
		for ; j < len(list.List); j++ {
			if str, ok := list.List[j].(*StringExpr); !ok || len(str.Before) > 0 {
				break
			}
		}

		var chunk []stringSortKey
		for index, x := range list.List[i:j] {
			chunk = append(chunk, makeSortKey(index, x.(*StringExpr)))
		}
		if !sort.IsSorted(byStringExpr(chunk)) || !isUniq(chunk) {
			if info != nil {
				info.SortStringList++
				if !tables.SortableWhitelist[context] {
					info.UnsafeSort++
					info.Log = append(info.Log, "sort:"+context)
				}
			}
			// Detach any comments above the first element so they stay at
			// the top of the chunk rather than following that element to
			// its new sorted position; reattach them after sorting.
			before := chunk[0].x.Comment().Before
			chunk[0].x.Comment().Before = nil

			sort.Sort(byStringExpr(chunk))
			chunk = uniq(chunk)

			chunk[0].x.Comment().Before = before
			for offset, key := range chunk {
				list.List[i+offset] = key.x
			}
			// Close the gap left by any duplicates uniq removed.
			list.List = append(list.List[:(i+len(chunk))], list.List[j:]...)
		}

		i = j
	}
}

// uniq removes duplicates from a list, which must already be sorted.
// It edits the list in place.
func uniq(sortedList []stringSortKey) []stringSortKey {
	// Reuse the backing array: out grows in place behind the read cursor.
	out := sortedList[:0]
	for _, sk := range sortedList {
		if len(out) == 0 || sk.value != out[len(out)-1].value {
			out = append(out, sk)
		}
	}
	return out
}

// isUniq reports whether the sorted list only contains unique elements.
+func isUniq(list []stringSortKey) bool { + for i := range list { + if i+1 < len(list) && list[i].value == list[i+1].value { + return false + } + } + return true +} + +// If stk describes a call argument like rule(arg=...), callArgName +// returns the name of that argument, formatted as "rule.arg". +func callArgName(stk []Expr) string { + n := len(stk) + if n < 2 { + return "" + } + arg := argName(stk[n-1]) + if arg == "" { + return "" + } + call, ok := stk[n-2].(*CallExpr) + if !ok { + return "" + } + rule, ok := call.X.(*LiteralExpr) + if !ok { + return "" + } + return rule.Token + "." + arg +} + +// A stringSortKey records information about a single string literal to be +// sorted. The strings are first grouped into four phases: most strings, +// strings beginning with ":", strings beginning with "//", and strings +// beginning with "@". The next significant part of the comparison is the list +// of elements in the value, where elements are split at `.' and `:'. Finally +// we compare by value and break ties by original index. +type stringSortKey struct { + phase int + split []string + value string + original int + x Expr +} + +func makeSortKey(index int, x *StringExpr) stringSortKey { + key := stringSortKey{ + value: x.Value, + original: index, + x: x, + } + + switch { + case strings.HasPrefix(x.Value, ":"): + key.phase = 1 + case strings.HasPrefix(x.Value, "//") || (tables.StripLabelLeadingSlashes && !strings.HasPrefix(x.Value, "@")): + key.phase = 2 + case strings.HasPrefix(x.Value, "@"): + key.phase = 3 + } + + key.split = strings.Split(strings.Replace(x.Value, ":", ".", -1), ".") + return key +} + +// byStringExpr implements sort.Interface for a list of stringSortKey. 
+type byStringExpr []stringSortKey + +func (x byStringExpr) Len() int { return len(x) } +func (x byStringExpr) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byStringExpr) Less(i, j int) bool { + xi := x[i] + xj := x[j] + + if xi.phase != xj.phase { + return xi.phase < xj.phase + } + for k := 0; k < len(xi.split) && k < len(xj.split); k++ { + if xi.split[k] != xj.split[k] { + return xi.split[k] < xj.split[k] + } + } + if len(xi.split) != len(xj.split) { + return len(xi.split) < len(xj.split) + } + if xi.value != xj.value { + return xi.value < xj.value + } + return xi.original < xj.original +} + +// fixMultilinePlus turns +// +// ... + +// [ ... ] +// +// ... + +// call(...) +// +// into +// ... + [ +// ... +// ] +// +// ... + call( +// ... +// ) +// +// which typically works better with our aggressively compact formatting. +func fixMultilinePlus(f *File, info *RewriteInfo) { + + // List manipulation helpers. + // As a special case, we treat f([...]) as a list, mainly + // for glob. + + // isList reports whether x is a list. + var isList func(x Expr) bool + isList = func(x Expr) bool { + switch x := x.(type) { + case *ListExpr: + return true + case *CallExpr: + if len(x.List) == 1 { + return isList(x.List[0]) + } + } + return false + } + + // isMultiLine reports whether x is a multiline list. + var isMultiLine func(Expr) bool + isMultiLine = func(x Expr) bool { + switch x := x.(type) { + case *ListExpr: + return x.ForceMultiLine || len(x.List) > 1 + case *CallExpr: + if x.ForceMultiLine || len(x.List) > 1 && !x.ForceCompact { + return true + } + if len(x.List) == 1 { + return isMultiLine(x.List[0]) + } + } + return false + } + + // forceMultiLine tries to force the list x to use a multiline form. + // It reports whether it was successful. + var forceMultiLine func(Expr) bool + forceMultiLine = func(x Expr) bool { + switch x := x.(type) { + case *ListExpr: + // Already multi line? 
+ if x.ForceMultiLine { + return true + } + // If this is a list containing a list, force the + // inner list to be multiline instead. + if len(x.List) == 1 && forceMultiLine(x.List[0]) { + return true + } + x.ForceMultiLine = true + return true + + case *CallExpr: + if len(x.List) == 1 { + return forceMultiLine(x.List[0]) + } + } + return false + } + + skip := map[Expr]bool{} + Walk(f, func(v Expr, stk []Expr) { + if skip[v] { + return + } + bin, ok := v.(*BinaryExpr) + if !ok || bin.Op != "+" { + return + } + + // Found a +. + // w + x + y + z parses as ((w + x) + y) + z, + // so chase down the left side to make a list of + // all the things being added together, separated + // by the BinaryExprs that join them. + // Mark them as "skip" so that when Walk recurses + // into the subexpressions, we won't reprocess them. + var all []Expr + for { + all = append(all, bin.Y, bin) + bin1, ok := bin.X.(*BinaryExpr) + if !ok || bin1.Op != "+" { + break + } + bin = bin1 + skip[bin] = true + } + all = append(all, bin.X) + + // Because the outermost expression was the + // rightmost one, the list is backward. Reverse it. + for i, j := 0, len(all)-1; i < j; i, j = i+1, j-1 { + all[i], all[j] = all[j], all[i] + } + + // The 'all' slice is alternating addends and BinaryExpr +'s: + // w, +, x, +, y, +, z + // If there are no lists involved, don't rewrite anything. + haveList := false + for i := 0; i < len(all); i += 2 { + if isList(all[i]) { + haveList = true + break + } + } + if !haveList { + return + } + + // Okay, there are lists. + // Consider each + next to a line break. + for i := 1; i < len(all); i += 2 { + bin := all[i].(*BinaryExpr) + if !bin.LineBreak { + continue + } + + // We're going to break the line after the +. + // If it is followed by a list, force that to be + // multiline instead. + if forceMultiLine(all[i+1]) { + bin.LineBreak = false + continue + } + + // If the previous list was multiline already, + // don't bother with the line break after + // the +. 
+ if isMultiLine(all[i-1]) { + bin.LineBreak = false + continue + } + } + }) +} + +// hasComments reports whether any comments are associated with +// the list or its elements. +func hasComments(list *ListExpr) (line, suffix bool) { + com := list.Comment() + if len(com.Before) > 0 || len(com.After) > 0 || len(list.End.Before) > 0 { + line = true + } + if len(com.Suffix) > 0 { + suffix = true + } + for _, elem := range list.List { + com := elem.Comment() + if len(com.Before) > 0 { + line = true + } + if len(com.Suffix) > 0 { + suffix = true + } + } + return +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/rule.go b/vendor/github.com/bazelbuild/buildtools/build/rule.go new file mode 100644 index 00000000000..7f8183628a0 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/rule.go @@ -0,0 +1,260 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Rule-level API for inspecting and modifying a build.File syntax tree. + +package build + +import "strings" + +// A Rule represents a single BUILD rule. +type Rule struct { + Call *CallExpr +} + +// Rules returns the rules in the file of the given kind (such as "go_library"). +// If kind == "", Rules returns all rules in the file. 
+func (f *File) Rules(kind string) []*Rule { + var all []*Rule + for _, stmt := range f.Stmt { + call, ok := stmt.(*CallExpr) + if !ok { + continue + } + rule := &Rule{call} + if kind != "" && rule.Kind() != kind { + continue + } + all = append(all, rule) + } + return all +} + +// RuleAt returns the rule in the file that starts at the specified line, or null if no such rule. +func (f *File) RuleAt(linenum int) *Rule { + for _, stmt := range f.Stmt { + call, ok := stmt.(*CallExpr) + if !ok { + continue + } + start, end := call.X.Span() + if start.Line <= linenum && linenum <= end.Line { + return &Rule{call} + } + } + return nil +} + +// DelRules removes rules with the given kind and name from the file. +// An empty kind matches all kinds; an empty name matches all names. +// It returns the number of rules that were deleted. +func (f *File) DelRules(kind, name string) int { + var i int + for _, stmt := range f.Stmt { + if call, ok := stmt.(*CallExpr); ok { + r := &Rule{call} + if (kind == "" || r.Kind() == kind) && + (name == "" || r.AttrString("name") == name) { + continue + } + } + f.Stmt[i] = stmt + i++ + } + n := len(f.Stmt) - i + f.Stmt = f.Stmt[:i] + return n +} + +// Kind returns the rule's kind (such as "go_library"). 
+// The kind of the rule may be given by a literal or it may be a sequence of dot expressions that +// begins with a literal, if the call expression does not conform to either of these forms, an +// empty string will be returned +func (r *Rule) Kind() string { + var names []string + expr := r.Call.X + for { + x, ok := expr.(*DotExpr) + if !ok { + break + } + names = append(names, x.Name) + expr = x.X + } + x, ok := expr.(*LiteralExpr) + if !ok { + return "" + } + names = append(names, x.Token) + // Reverse the elements since the deepest expression contains the leading literal + for l, r := 0, len(names)-1; l < r; l, r = l+1, r-1 { + names[l], names[r] = names[r], names[l] + } + return strings.Join(names, ".") +} + +// SetKind changes rule's kind (such as "go_library"). +func (r *Rule) SetKind(kind string) { + names := strings.Split(kind, ".") + var expr Expr + expr = &LiteralExpr{Token: names[0]} + for _, name := range names[1:] { + expr = &DotExpr{X: expr, Name: name} + } + r.Call.X = expr +} + +// Name returns the rule's target name. +// If the rule has no target name, Name returns the empty string. +func (r *Rule) Name() string { + return r.AttrString("name") +} + +// AttrKeys returns the keys of all the rule's attributes. +func (r *Rule) AttrKeys() []string { + var keys []string + for _, expr := range r.Call.List { + if binExpr, ok := expr.(*BinaryExpr); ok && binExpr.Op == "=" { + if keyExpr, ok := binExpr.X.(*LiteralExpr); ok { + keys = append(keys, keyExpr.Token) + } + } + } + return keys +} + +// AttrDefn returns the BinaryExpr defining the rule's attribute with the given key. +// That is, the result is a *BinaryExpr with Op == "=". +// If the rule has no such attribute, AttrDefn returns nil. 
+func (r *Rule) AttrDefn(key string) *BinaryExpr { + for _, kv := range r.Call.List { + as, ok := kv.(*BinaryExpr) + if !ok || as.Op != "=" { + continue + } + k, ok := as.X.(*LiteralExpr) + if !ok || k.Token != key { + continue + } + return as + } + return nil +} + +// Attr returns the value of the rule's attribute with the given key +// (such as "name" or "deps"). +// If the rule has no such attribute, Attr returns nil. +func (r *Rule) Attr(key string) Expr { + as := r.AttrDefn(key) + if as == nil { + return nil + } + return as.Y +} + +// DelAttr deletes the rule's attribute with the named key. +// It returns the old value of the attribute, or nil if the attribute was not found. +func (r *Rule) DelAttr(key string) Expr { + list := r.Call.List + for i, kv := range list { + as, ok := kv.(*BinaryExpr) + if !ok || as.Op != "=" { + continue + } + k, ok := as.X.(*LiteralExpr) + if !ok || k.Token != key { + continue + } + copy(list[i:], list[i+1:]) + r.Call.List = list[:len(list)-1] + return as.Y + } + return nil +} + +// SetAttr sets the rule's attribute with the given key to value. +// If the rule has no attribute with the key, SetAttr appends +// one to the end of the rule's attribute list. +func (r *Rule) SetAttr(key string, val Expr) { + as := r.AttrDefn(key) + if as != nil { + as.Y = val + return + } + + r.Call.List = append(r.Call.List, + &BinaryExpr{ + X: &LiteralExpr{Token: key}, + Op: "=", + Y: val, + }, + ) +} + +// AttrLiteral returns the literal form of the rule's attribute +// with the given key (such as "cc_api_version"), only when +// that value is an identifier or number. +// If the rule has no such attribute or the attribute is not an identifier or number, +// AttrLiteral returns "". +func (r *Rule) AttrLiteral(key string) string { + lit, ok := r.Attr(key).(*LiteralExpr) + if !ok { + return "" + } + return lit.Token +} + +// AttrString returns the value of the rule's attribute +// with the given key (such as "name"), as a string. 
+// If the rule has no such attribute or the attribute has a non-string value, +// Attr returns the empty string. +func (r *Rule) AttrString(key string) string { + str, ok := r.Attr(key).(*StringExpr) + if !ok { + return "" + } + return str.Value +} + +// AttrStrings returns the value of the rule's attribute +// with the given key (such as "srcs"), as a []string. +// If the rule has no such attribute or the attribute is not +// a list of strings, AttrStrings returns a nil slice. +func (r *Rule) AttrStrings(key string) []string { + return Strings(r.Attr(key)) +} + +// Strings returns expr as a []string. +// If expr is not a list of string literals, +// Strings returns a nil slice instead. +// If expr is an empty list of string literals, +// returns a non-nil empty slice. +// (this allows differentiating between these two cases) +func Strings(expr Expr) []string { + list, ok := expr.(*ListExpr) + if !ok { + return nil + } + all := []string{} // not nil + for _, l := range list.List { + str, ok := l.(*StringExpr) + if !ok { + return nil + } + all = append(all, str.Value) + } + return all +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/rule_test.go b/vendor/github.com/bazelbuild/buildtools/build/rule_test.go new file mode 100644 index 00000000000..0e8f0e38597 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/rule_test.go @@ -0,0 +1,120 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package build + +import ( + "testing" +) + +var simpleCall *CallExpr = &CallExpr{ + X: &LiteralExpr{ + Token: "java_library", + }, + List: []Expr{ + &BinaryExpr{ + X: &LiteralExpr{ + Token: "name", + }, + Op: "=", + Y: &StringExpr{ + Value: "x", + }, + }, + }, +} + +var simpleRule *Rule = &Rule{simpleCall} + +var structCall *CallExpr = &CallExpr{ + X: &DotExpr{ + X: &DotExpr{ + X: &LiteralExpr{ + Token: "foo", + }, + Name: "bar", + }, + Name: "baz", + }, + List: []Expr{ + &BinaryExpr{ + X: &LiteralExpr{ + Token: "name", + }, + Op: "=", + Y: &StringExpr{ + Value: "x", + }, + }, + }, +} + +var structRule *Rule = &Rule{structCall} + +func TestKind(t *testing.T) { + if simpleRule.Kind() != "java_library" { + t.Errorf(`simpleRule.Kind() = %v, want "java_library"`, simpleRule.Kind()) + } + if structRule.Kind() != "foo.bar.baz" { + t.Errorf(`structRule.Kind() = %v, want "foo.bar.baz"`, structRule.Kind()) + } +} + +func TestSetKind(t *testing.T) { + rule := &Rule{ + &CallExpr{ + X: &LiteralExpr{ + Token: "java_library", + }, + List: []Expr{ + &BinaryExpr{ + X: &LiteralExpr{ + Token: "name", + }, + Op: "=", + Y: &StringExpr{ + Value: "x", + }, + }, + }, + }, + } + + rule.SetKind("java_binary") + compare(t, rule.Call.X, &LiteralExpr{Token: "java_binary"}) + + rule.SetKind("foo.bar.baz") + compare(t, rule.Call.X, &DotExpr{ + X: &DotExpr{ + X: &LiteralExpr{ + Token: "foo", + }, + Name: "bar", + }, + Name: "baz", + }) +} + +func TestRules(t *testing.T) { + f := &File{ + Stmt: []Expr{ + simpleCall, + structCall, + }, + } + + compare(t, f.Rules(""), []*Rule{simpleRule, structRule}) + compare(t, f.Rules("java_binary"), []*Rule(nil)) + compare(t, f.Rules("java_library"), []*Rule{simpleRule}) + compare(t, f.Rules("foo.bar.baz"), []*Rule{structRule}) +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/syntax.go b/vendor/github.com/bazelbuild/buildtools/build/syntax.go new file mode 100644 index 00000000000..42023f31c71 --- /dev/null +++ 
b/vendor/github.com/bazelbuild/buildtools/build/syntax.go @@ -0,0 +1,453 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Package build implements parsing and printing of BUILD files. +package build + +// Syntax data structure definitions. + +import ( + "strings" + "unicode/utf8" +) + +// A Position describes the position between two bytes of input. +type Position struct { + Line int // line in input (starting at 1) + LineRune int // rune in line (starting at 1) + Byte int // byte in input (starting at 0) +} + +// add returns the position at the end of s, assuming it starts at p. +func (p Position) add(s string) Position { + p.Byte += len(s) + if n := strings.Count(s, "\n"); n > 0 { + p.Line += n + s = s[strings.LastIndex(s, "\n")+1:] + p.LineRune = 1 + } + p.LineRune += utf8.RuneCountInString(s) + return p +} + +// An Expr represents an input element. +type Expr interface { + // Span returns the start and end position of the expression, + // excluding leading or trailing comments. + Span() (start, end Position) + + // Comment returns the comments attached to the expression. + // This method would normally be named 'Comments' but that + // would interfere with embedding a type of the same name. + Comment() *Comments +} + +// A Comment represents a single # comment. 
+type Comment struct { + Start Position + Token string // without trailing newline + Suffix bool // an end of line (not whole line) comment +} + +// Comments collects the comments associated with an expression. +type Comments struct { + Before []Comment // whole-line comments before this expression + Suffix []Comment // end-of-line comments after this expression + + // For top-level expressions only, After lists whole-line + // comments following the expression. + After []Comment +} + +// Comment returns the receiver. This isn't useful by itself, but +// a Comments struct is embedded into all the expression +// implementation types, and this gives each of those a Comment +// method to satisfy the Expr interface. +func (c *Comments) Comment() *Comments { + return c +} + +// A File represents an entire BUILD file. +type File struct { + Path string // file path, relative to workspace directory + Comments + Stmt []Expr +} + +func (x *File) Span() (start, end Position) { + if len(x.Stmt) == 0 { + return + } + start, _ = x.Stmt[0].Span() + _, end = x.Stmt[len(x.Stmt)-1].Span() + return start, end +} + +// A CommentBlock represents a top-level block of comments separate +// from any rule. +type CommentBlock struct { + Comments + Start Position +} + +func (x *CommentBlock) Span() (start, end Position) { + return x.Start, x.Start +} + +// A PythonBlock represents a blob of Python code, typically a def or for loop. +type PythonBlock struct { + Comments + Start Position + Token string // raw Python code, including final newline +} + +func (x *PythonBlock) Span() (start, end Position) { + return x.Start, x.Start.add(x.Token) +} + +// A LiteralExpr represents a literal identifier or number. +type LiteralExpr struct { + Comments + Start Position + Token string // identifier token +} + +func (x *LiteralExpr) Span() (start, end Position) { + return x.Start, x.Start.add(x.Token) +} + +// A StringExpr represents a single literal string. 
+type StringExpr struct { + Comments + Start Position + Value string // string value (decoded) + TripleQuote bool // triple quote output + End Position + + // To allow specific formatting of string literals, + // at least within our requirements, record the + // preferred form of Value. This field is a hint: + // it is only used if it is a valid quoted form for Value. + Token string +} + +func (x *StringExpr) Span() (start, end Position) { + return x.Start, x.End +} + +// An End represents the end of a parenthesized or bracketed expression. +// It is a place to hang comments. +type End struct { + Comments + Pos Position +} + +func (x *End) Span() (start, end Position) { + return x.Pos, x.Pos.add(")") +} + +// A CallExpr represents a function call expression: X(List). +type CallExpr struct { + Comments + X Expr + ListStart Position // position of ( + List []Expr + End // position of ) + ForceCompact bool // force compact (non-multiline) form when printing + ForceMultiLine bool // force multiline form when printing +} + +func (x *CallExpr) Span() (start, end Position) { + start, _ = x.X.Span() + return start, x.End.Pos.add(")") +} + +// A DotExpr represents a field selector: X.Name. +type DotExpr struct { + Comments + X Expr + Dot Position + NamePos Position + Name string +} + +func (x *DotExpr) Span() (start, end Position) { + start, _ = x.X.Span() + return start, x.NamePos.add(x.Name) +} + +// A ListForExpr represents a list comprehension expression: [X for ... if ...]. +type ListForExpr struct { + Comments + ForceMultiLine bool // split expression across multiple lines + Brack string // "", "()", or "[]" + Start Position + X Expr + For []*ForClauseWithIfClausesOpt + End +} + +func (x *ListForExpr) Span() (start, end Position) { + return x.Start, x.End.Pos.add("]") +} + +// A ForClause represents a for clause in a list comprehension: for Var in Expr. 
+type ForClause struct { + Comments + For Position + Var []Expr + In Position + Expr Expr +} + +func (x *ForClause) Span() (start, end Position) { + _, end = x.Expr.Span() + return x.For, end +} + +// An IfClause represents an if clause in a list comprehension: if Cond. +type IfClause struct { + Comments + If Position + Cond Expr +} + +func (x *IfClause) Span() (start, end Position) { + _, end = x.Cond.Span() + return x.If, end +} + +// A ForClauseWithIfClausesOpt represents a for clause in a list comprehension followed by optional +// if expressions: for ... in ... [if ... if ...] +type ForClauseWithIfClausesOpt struct { + Comments + For *ForClause + Ifs []*IfClause +} + +func (x *ForClauseWithIfClausesOpt) Span() (start, end Position) { + start, end = x.For.Span() + if len(x.Ifs) > 0 { + _, end = x.Ifs[len(x.Ifs)-1].Span() + } + + return start, end +} + +// A KeyValueExpr represents a dictionary entry: Key: Value. +type KeyValueExpr struct { + Comments + Key Expr + Colon Position + Value Expr +} + +func (x *KeyValueExpr) Span() (start, end Position) { + start, _ = x.Key.Span() + _, end = x.Value.Span() + return start, end +} + +// A DictExpr represents a dictionary literal: { List }. +type DictExpr struct { + Comments + Start Position + List []Expr // all *KeyValueExprs + Comma Position // position of trailing comma, if any + End + ForceMultiLine bool // force multiline form when printing +} + +func (x *DictExpr) Span() (start, end Position) { + return x.Start, x.End.Pos.add("}") +} + +// A ListExpr represents a list literal: [ List ]. +type ListExpr struct { + Comments + Start Position + List []Expr + Comma Position // position of trailing comma, if any + End + ForceMultiLine bool // force multiline form when printing +} + +func (x *ListExpr) Span() (start, end Position) { + return x.Start, x.End.Pos.add("]") +} + +// A SetExpr represents a set literal: { List }. 
+type SetExpr struct { + Comments + Start Position + List []Expr + Comma Position // position of trailing comma, if any + End + ForceMultiLine bool // force multiline form when printing +} + +func (x *SetExpr) Span() (start, end Position) { + return x.Start, x.End.Pos.add("}") +} + +// A TupleExpr represents a tuple literal: (List) +type TupleExpr struct { + Comments + Start Position + List []Expr + Comma Position // position of trailing comma, if any + End + ForceCompact bool // force compact (non-multiline) form when printing + ForceMultiLine bool // force multiline form when printing +} + +func (x *TupleExpr) Span() (start, end Position) { + return x.Start, x.End.Pos.add(")") +} + +// A UnaryExpr represents a unary expression: Op X. +type UnaryExpr struct { + Comments + OpStart Position + Op string + X Expr +} + +func (x *UnaryExpr) Span() (start, end Position) { + _, end = x.X.Span() + return x.OpStart, end +} + +// A BinaryExpr represents a binary expression: X Op Y. +type BinaryExpr struct { + Comments + X Expr + OpStart Position + Op string + LineBreak bool // insert line break between Op and Y + Y Expr +} + +func (x *BinaryExpr) Span() (start, end Position) { + start, _ = x.X.Span() + _, end = x.Y.Span() + return start, end +} + +// A ParenExpr represents a parenthesized expression: (X). +type ParenExpr struct { + Comments + Start Position + X Expr + End + ForceMultiLine bool // insert line break after opening ( and before closing ) +} + +func (x *ParenExpr) Span() (start, end Position) { + return x.Start, x.End.Pos.add(")") +} + +// A SliceExpr represents a slice expression: expr[from:to] or expr[from:to:step] . +type SliceExpr struct { + Comments + X Expr + SliceStart Position + From Expr + FirstColon Position + To Expr + SecondColon Position + Step Expr + End Position +} + +func (x *SliceExpr) Span() (start, end Position) { + start, _ = x.X.Span() + return start, x.End +} + +// An IndexExpr represents an index expression: X[Y]. 
+type IndexExpr struct { + Comments + X Expr + IndexStart Position + Y Expr + End Position +} + +func (x *IndexExpr) Span() (start, end Position) { + start, _ = x.X.Span() + return start, x.End +} + +// A LambdaExpr represents a lambda expression: lambda Var: Expr. +type LambdaExpr struct { + Comments + Lambda Position + Var []Expr + Colon Position + Expr Expr +} + +func (x *LambdaExpr) Span() (start, end Position) { + _, end = x.Expr.Span() + return x.Lambda, end +} + +// ConditionalExpr represents the conditional: X if TEST else ELSE. +type ConditionalExpr struct { + Comments + Then Expr + IfStart Position + Test Expr + ElseStart Position + Else Expr +} + +// Span returns the start and end position of the expression, +// excluding leading or trailing comments. +func (x *ConditionalExpr) Span() (start, end Position) { + start, _ = x.Then.Span() + _, end = x.Else.Span() + return start, end +} + +// A CodeBlock represents an indented code block. +type CodeBlock struct { + Statements []Expr + Start Position + End +} + +func (x *CodeBlock) Span() (start, end Position) { + return x.Start, x.End.Pos +} + +// A FuncDef represents a function definition expression: def foo(List):. 
+type FuncDef struct { + Comments + Start Position // position of def + Name string + ListStart Position // position of ( + Args []Expr + Body CodeBlock + End // position of the end + ForceCompact bool // force compact (non-multiline) form when printing + ForceMultiLine bool // force multiline form when printing +} + +func (x *FuncDef) Span() (start, end Position) { + return x.Start, x.End.Pos.add(":") +} diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/001.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/001.golden new file mode 100644 index 00000000000..56ba48a8568 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/001.golden @@ -0,0 +1,16 @@ +cc_test( + name = "bar", + size = "small", + srcs = [ + "a.cc", + "b.cc", + "c.cc", + ], + data = ["datum"], + datum = ["data"], + deps = [ + ":foo", + "//base", + "//util:map-util", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/001.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/001.in new file mode 100644 index 00000000000..ce0b1c93767 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/001.in @@ -0,0 +1 @@ +cc_test(name="bar",size="small",srcs=["a.cc","b.cc","c.cc"],deps=["//base",":foo","//util:map-util"], data = [ "datum" ], datum = [ "data", ]) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/002.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/002.golden new file mode 100644 index 00000000000..3dd2f84bf8f --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/002.golden @@ -0,0 +1,14 @@ +cc_test( + name = "b\"ar'\"", + size = "small", + srcs = [ + "a.cc", + "b.cc", + "c.cc", + ], + deps = [ + ":foo", + "//base", + "//util:map-util", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/002.in 
b/vendor/github.com/bazelbuild/buildtools/build/testdata/002.in new file mode 100644 index 00000000000..6f318aeaf95 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/002.in @@ -0,0 +1 @@ +cc_test ( name = 'b\"ar\'"' , srcs = [ 'a.cc' , "b.cc" , "c.cc" ] , size = "small" , deps = [ "//base" , ":foo", "//util:map-util", ] ) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/004.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/004.golden new file mode 100644 index 00000000000..46cd9f9c0a6 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/004.golden @@ -0,0 +1,6 @@ +JAVA_FILES = [ + "Foo.java", + "Bar.java", + "Baz.java", + "Quux.java", +] diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/004.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/004.in new file mode 100644 index 00000000000..34f23eede47 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/004.in @@ -0,0 +1,3 @@ +JAVA_FILES = [ "Foo.java", "Bar.java", + "Baz.java", "Quux.java" + ] diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/005.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/005.golden new file mode 100644 index 00000000000..9cb3bb79947 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/005.golden @@ -0,0 +1,7 @@ +JAVA_FILES = [ + # Comment regarding Foo.java + "Foo.java", + "Bar.java", + "Baz.java", # Comment regarding Baz.java + "Quux.java", +] diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/005.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/005.in new file mode 100644 index 00000000000..8508896e334 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/005.in @@ -0,0 +1,7 @@ +JAVA_FILES = [ + # Comment regarding Foo.java + "Foo.java", + "Bar.java", + 
"Baz.java", # Comment regarding Baz.java + "Quux.java" +] diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/007.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/007.golden new file mode 100644 index 00000000000..43955a3b7cb --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/007.golden @@ -0,0 +1,11 @@ +exports_files([ + "url_substring_blacklist", + "url_substring_blacklist_short", +]) + +package(default_visibility = ["//foo:bar"]) + +package(default_visibility = [ + "//bar:baz", + "//foo:bar", +]) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/007.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/007.in new file mode 100644 index 00000000000..13d8947cebc --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/007.in @@ -0,0 +1,6 @@ +exports_files([ "url_substring_blacklist", + "url_substring_blacklist_short" ]) + +package(default_visibility = [ "//foo:bar" ]) + +package(default_visibility = [ "//foo:bar", "//bar:baz" ]) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/009.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/009.golden new file mode 100644 index 00000000000..8c3ee2d3849 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/009.golden @@ -0,0 +1,49 @@ +# buildifier: leave-alone +L1 = [ + "C", + "D", + "A", + "Z", + "B", +] + +L2 = [ + "C", + "D", + "A", + "Z", + "B", +] + +# All of the GSS, in the order they should be compiled in. 
+# buildifier: leave-alone +filegroup( + name = "all_gss", + srcs = [ + "iphone_sprites.gss", + ":iphone_imagedata_gss", + ":multi_resolution_imagedata_gss", + ":iphone_sprite_images_gss", + "iphone/00_topstyles.gss", + "iphone/avatar.gss", + "iphone/chrome.gss", + "iphone/conversation.gss", + "iphone/error.gss", + "iphone/installer.gss", + "iphone/labels.gss", + "iphone/menu.gss", + "iphone/messageview.gss", + "iphone/search.gss", + "iphone/threadlist.gss", + "iphone/toolbar.gss", + "iphone/zz_bottomstyles.gss", + "iphone/overlay.gss", + "iphone/moremenu.gss", + "iphone/settings.gss", + "iphone/netstat.gss", + "iphone/mouse.gss", + "iphone/sprite_overrides.gss", + "iphone/iconmenu.gss", + "iphone/blpromo.gss", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/009.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/009.in new file mode 100644 index 00000000000..8c3ee2d3849 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/009.in @@ -0,0 +1,49 @@ +# buildifier: leave-alone +L1 = [ + "C", + "D", + "A", + "Z", + "B", +] + +L2 = [ + "C", + "D", + "A", + "Z", + "B", +] + +# All of the GSS, in the order they should be compiled in. 
+# buildifier: leave-alone +filegroup( + name = "all_gss", + srcs = [ + "iphone_sprites.gss", + ":iphone_imagedata_gss", + ":multi_resolution_imagedata_gss", + ":iphone_sprite_images_gss", + "iphone/00_topstyles.gss", + "iphone/avatar.gss", + "iphone/chrome.gss", + "iphone/conversation.gss", + "iphone/error.gss", + "iphone/installer.gss", + "iphone/labels.gss", + "iphone/menu.gss", + "iphone/messageview.gss", + "iphone/search.gss", + "iphone/threadlist.gss", + "iphone/toolbar.gss", + "iphone/zz_bottomstyles.gss", + "iphone/overlay.gss", + "iphone/moremenu.gss", + "iphone/settings.gss", + "iphone/netstat.gss", + "iphone/mouse.gss", + "iphone/sprite_overrides.gss", + "iphone/iconmenu.gss", + "iphone/blpromo.gss", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/011.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/011.golden new file mode 100644 index 00000000000..432d0e20235 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/011.golden @@ -0,0 +1 @@ +java_library(name = "file_without_trailing_newline") diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/011.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/011.in new file mode 100644 index 00000000000..fa6610b8fdd --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/011.in @@ -0,0 +1 @@ +java_library(name = "file_without_trailing_newline") \ No newline at end of file diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/014.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/014.golden new file mode 100644 index 00000000000..1e5c5f4b4c4 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/014.golden @@ -0,0 +1,15 @@ +foo = [x for x in (1, 2, 3)] + +bar( + a = -1, + b = not foo, +) + +cc_library( + name = "bot_scorer", + srcs = [ + "bot_scorer.cc", + "bot_scorer.h", + ], + deps = 
[":logdata_protolib"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/014.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/014.in new file mode 100644 index 00000000000..8397d1bacd8 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/014.in @@ -0,0 +1,11 @@ +foo = [ x for x in (1, 2, 3) ] + +bar(a=-1, b=not foo) + +cc_library( + name = "bot_scorer", + srcs = [ + "bot_scorer.cc", + "bot_scorer.h", + ], + deps = [ ":logdata_protolib" ]) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/015.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/015.golden new file mode 100644 index 00000000000..4b52602a45d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/015.golden @@ -0,0 +1,10 @@ +# shouldn't add trailing spaces to the blank line after the first comment. +cc_binary( + deps = [ + # comment before blank line + + # comment after blank line + "y", + "a", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/015.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/015.in new file mode 100644 index 00000000000..081624e325d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/015.in @@ -0,0 +1,9 @@ +# shouldn't add trailing spaces to the blank line after the first comment. +cc_binary( + deps = [ + # comment before blank line + + # comment after blank line + "y", + "a", + ]) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/016.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/016.golden new file mode 100644 index 00000000000..339278d9518 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/016.golden @@ -0,0 +1,63 @@ +# 1-element tuple with trailing comma must preserve it. 
+ +x = (1,) + +y = ( + 1, +) + +foo((1,)) + +bar(1) + +baz([1]) + +foo(( + 1, +)) + +bar( + 1, +) + +baz([ + 1, +]) + +x = (1, 2, -3) + +y = ( + 1, + 2, + -3, +) + +foo((1, 2)) + +bar(1, 2) + +baz([ + 1, + 2, +]) + +foo(( + 1, + 2, +)) + +foo(( + 1, + 2, + [3], +)) + +bar( + 1, + 2, +) + +baz([ + 1, + 2, +]) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/016.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/016.in new file mode 100644 index 00000000000..79c83f040b5 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/016.in @@ -0,0 +1,46 @@ +# 1-element tuple with trailing comma must preserve it. + +x = (1,) + +y = ( + 1, +) + +foo((1,)) + +bar(1,) + +baz([1,]) + +foo((1, +)) + +bar(1, +) + +baz([1, +]) + +x = (1, 2, -3,) + +y = ( + 1, 2, -3, +) + +foo((1, 2,)) + +bar(1, 2,) + +baz([1, 2,]) + +foo((1, 2, +)) + +foo((1, 2, [3])) + +bar(1, 2, +) + +baz([1, 2, +]) + diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/017.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/017.golden new file mode 100644 index 00000000000..b55a665af9d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/017.golden @@ -0,0 +1,39 @@ +# c1 +greeting = "hello " + \ + "world" # c2 +# c3 + +# c4 +greeting = "hello " + \ + "world" # c5 +# c6 + +# c7 +greeting = "hello " + \ + "world" # c8 +# c9 + +# c10 +greeting = ("hello " + # c11 + "world") # c12 +# c13 + +# c14 +greeting = ("hello " + # c15 + "world") # c16 +# c17 + +# c18 +greeting = ("hello" + # c19 + # c20 + "world") # c21 +# c22 + +greeting = "hello " + \ + "world" # c23 + +greeting = ("hello " + # c24 + "world") + +greeting = ("hello " + # c25 + "world") diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/017.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/017.in new file mode 100644 index 00000000000..8dc4a988243 --- /dev/null +++ 
b/vendor/github.com/bazelbuild/buildtools/build/testdata/017.in @@ -0,0 +1,39 @@ +# c1 +greeting = "hello " + \ + "world" # c2 +# c3 + +# c4 +greeting = "hello " \ + + "world" # c5 +# c6 + +# c7 +greeting = "hello " + \ + "world" # c8 +# c9 + +# c10 +greeting = ("hello " + # c11 + "world") # c12 +# c13 + +# c14 +greeting = ("hello " # c15 + + "world") # c16 +# c17 + +# c18 +greeting = ("hello" # c19 + + # c20 + "world") # c21 +# c22 + +greeting = "hello " + \ + "world" # c23 + +greeting = ("hello " + # c24 + "world") + +greeting = ("hello " + # c25 + "world") diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/019.golden b/vendor/github.com/bazelbuild/buildtools/build/testdata/019.golden new file mode 100644 index 00000000000..dcb103de9e6 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/019.golden @@ -0,0 +1,307 @@ +cc_library( + name = "abc", + srcs = ["abc.cc"], + deps = [ + ".zzz", + " zzz", + "!zzz", + "\"zzz", + "#zzz", + "$zzz", + "%zzz", + "&zzz", + "'zzz", + "(zzz", + ")zzz", + "*zzz", + "+zzz", + ",zzz", + "-zzz", + "/zzz", + "0zzz", + "123", + "1zzz", + "2zzz", + "3zzz", + "4zzz", + "5zzz", + "6zzz", + "7zzz", + "8zzz", + "9zzz", + ";zzz", + "zzz", + "?zzz", + "Azzz", + "Bzzz", + "Czzz", + "Dzzz", + "Ezzz", + "FOO", + "Fzzz", + "Gzzz", + "Hzzz", + "Izzz", + "Jzzz", + "Kzzz", + "Lzzz", + "Mzzz", + "Nzzz", + "Ozzz", + "Pzzz", + "Qzzz", + "Rzzz", + "Szzz", + "Tzzz", + "Uzzz", + "Vzzz", + "Wzzz", + "Xzzz", + "Yzzz", + "Zzzz", + "[zzz", + "\\zzz", + "]zzz", + "^zzz", + "_zzz", + "`zzz", + "azzz", + "bzzz", + "czzz", + "dzzz", + "ezzz", + "foo", + "fzzz", + "gzzz", + "hzzz", + "izzz", + "jzzz", + "kzzz", + "lzzz", + "mzzz", + "nzzz", + "ozzz", + "pzzz", + "qzzz", + "rzzz", + "szzz", + "tzzz", + "uzzz", + "vzzz", + "wzzz", + "xzzz", + "yzzz", + "zzz.", + "zzz:", + "zzz ", + "zzz!", + "zzz#", + "zzz$", + "zzz%", + "zzz&", + "zzz'", + "zzz(", + "zzz)", + "zzz*", + "zzz+", + "zzz,", + "zzz-", 
+ "zzz/", + "zzz0", + "zzz1", + "zzz2", + "zzz3", + "zzz4", + "zzz5", + "zzz6", + "zzz7", + "zzz8", + "zzz9", + "zzz;", + "zzz<", + "zzz=", + "zzz>", + "zzz?", + "zzz@", + "zzzA", + "zzzB", + "zzzC", + "zzzD", + "zzzE", + "zzzF", + "zzzG", + "zzzH", + "zzzI", + "zzzJ", + "zzzK", + "zzzL", + "zzzM", + "zzzN", + "zzzO", + "zzzP", + "zzzQ", + "zzzR", + "zzzS", + "zzzT", + "zzzU", + "zzzV", + "zzzW", + "zzzX", + "zzzY", + "zzzZ", + "zzz[", + "zzz\\", + "zzz]", + "zzz^", + "zzz_", + "zzz`", + "zzza", + "zzzb", + "zzzc", + "zzzd", + "zzze", + "zzzf", + "zzzg", + "zzzh", + "zzzi", + "zzzj", + "zzzk", + "zzzl", + "zzzm", + "zzzn", + "zzzo", + "zzzp", + "zzzq", + "zzzr", + "zzzs", + "zzzt", + "zzzu", + "zzzv", + "zzzw", + "zzzx", + "zzzy", + "zzzz", + "zzz{", + "zzz|", + "zzz}", + "zzz~", + "{asdf}", + "{zzz", + "|zzz", + "}zzz", + "~zzz", + ":a", + ":zzz", + "//a", + "//a:b", + "//a.zzz", + "//a:zzz", + "//a zzz", + "//a!zzz", + "//a#zzz", + "//a$zzz", + "//a%zzz", + "//a&zzz", + "//a'zzz", + "//a(zzz", + "//a)zzz", + "//a*zzz", + "//a+zzz", + "//a,zzz", + "//a-zzz", + "//a/b", + "//a/b:c", + "//a/b:d", + "//a/b/c", + "//a/b/c:d", + "//a/zzz", + "//a0zzz", + "//a1zzz", + "//a2", + "//a2:a", + "//a2:b", + "//a2zzz", + "//a3zzz", + "//a4zzz", + "//a5zzz", + "//a6zzz", + "//a7zzz", + "//a8zzz", + "//a9zzz", + "//a;zzz", + "//azzz", + "//a?zzz", + "//a@zzz", + "//aAzzz", + "//aBzzz", + "//aCzzz", + "//aDzzz", + "//aEzzz", + "//aFzzz", + "//aGzzz", + "//aHzzz", + "//aIzzz", + "//aJzzz", + "//aKzzz", + "//aLzzz", + "//aMzzz", + "//aNzzz", + "//aOzzz", + "//aPzzz", + "//aQzzz", + "//aRzzz", + "//aSzzz", + "//aTzzz", + "//aUzzz", + "//aVzzz", + "//aWzzz", + "//aXzzz", + "//aYzzz", + "//aZzzz", + "//a[zzz", + "//a\\zzz", + "//a]zzz", + "//a^zzz", + "//a_zzz", + "//a`zzz", + "//aazzz", + "//abzzz", + "//aczzz", + "//adzzz", + "//aezzz", + "//afzzz", + "//agzzz", + "//ahzzz", + "//aizzz", + "//ajzzz", + "//akzzz", + "//alzzz", + "//amzzz", + "//anzzz", + "//aozzz", + "//apzzz", + 
"//aqzzz", + "//arzzz", + "//aszzz", + "//atzzz", + "//auzzz", + "//avzzz", + "//awzzz", + "//axzzz", + "//ayzzz", + "//azzz", + "//azzzz", + "//a{zzz", + "//a|zzz", + "//a}zzz", + "//a~zzz", + "@abc", + "@abc//:xyz", + "@abc//foo", + "@zzz", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/build/testdata/019.in b/vendor/github.com/bazelbuild/buildtools/build/testdata/019.in new file mode 100644 index 00000000000..ed857295a5e --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/build/testdata/019.in @@ -0,0 +1,307 @@ +cc_library(name = "abc", + srcs = [ "abc.cc" ], + deps = [ + "Wzzz", + "//aEzzz", + "6zzz", + "[zzz", + "//aSzzz", + "//a?zzz", + "//aLzzz", + "//a%zzz", + "//a|zzz", + "Zzzz", + "^zzz", + "Kzzz", + " zzz", + "//aTzzz", + "//aFzzz", + "ezzz", + "//a:a", + ",zzz", + "mzzz", + "Xzzz", + "lzzz", + "//a^zzz", + "fzzz", + "//a/b/c:d", + "$zzz", + "//a0zzz", + "foo", + "jzzz", + "4zzz", + "}zzz", + "Bzzz", + "//aazzz", + "//a5zzz", + ":a", + "\\zzz", + "2zzz", + "@abc//foo:foo", + "//ahzzz", + "yzzz", + "//adzzz", + "@abc//:xyz", + "//a{zzz", + "vzzz", + ">zzz", + "//aZzzz", + "@zzz", + "@abc//:abc", + "//azzz", + "Vzzz", + "//a]zzz", + "//a-zzz", + "//a/b:c", + "Yzzz", + "//aAzzz", + "&zzz", + "//a>zzz", + "bzzz", + "//a", + "Uzzz", + "zzzu", + "//a!zzz", + "zzz*", + "zzz[", + "zzzy", + "//aezzz", + "zzzp", + "//a&zzz", + "xzzz", + "zzz^", + "//a1zzz", + "zzz_", + "zzzs", + "zzzc", + "zzzK", + "//a/zzz", + "zzzz", + "zzz;", + "zzzn", + "zzzQ", + "zzz\\", + "123", + "3zzz", + "//a/b/c:c", + "zzz7", + "//avzzz", + "Ezzz", + "zzzT", + "9zzz", + "zzzg", + "//a4zzz", + "/zzz", + "//a6zzz", + "zzz9", + "czzz", + "//a2:b", + "{zzz", + "//aJzzz", + "zzz2", + "zzz%", + "zzzx", + "dzzz", + "zzz)", + "//a)zzz", + "zzz.", + "//aOzzz", + "zzzb", + "Hzzz", + "=zzz", + "zzz}", + "zzzh", + ";zzz", + "zzz:", + "//a*zzz", + "5zzz", + "zzzY", + "zzz1", + "-zzz", + "//a}zzz", + "zzz/", + "{asdf}", + "zzzG", + "zzzk", + "//arzzz", + 
"zzzj", + "zzzl", + "zzz|", + "(zzz", + "zzz5", + "zzz8", + "zzzX", + "zzzP", + "zzz0", + "zzzU", + "zzz$", + "zzzN", + "zzz!", + "FOO", + "//a:b", + "zzz-", + "//aizzz", + "zzzV", + "zzzz", + "zzz`", + "zzzm", + "zzz?", + "tzzz", + "//a$zzz", + "zzz,", + "zzzC", + "zzz'", + "zzzr", + "zzzw", + "zzzM", + "+zzz", + "zzzL", + "zzzq", + "'zzz", + "Pzzz", + "zzzJ", + "zzz=", + "zzzW", + "Nzzz", + "zzzR", + "zzzZ", + "zzz4", + "zzzB", + "zzzA", + "zzz#", + "_". There currently defined setting + // types are: + // + // - 'blaze': settings implemented in Blaze itself + DefaultSetting []string `protobuf:"bytes,7,rep,name=default_setting,json=defaultSetting" json:"default_setting,omitempty"` + // The location of the target in the BUILD file in a machine-parseable form. + DEPRECATEDParseableLocation *Location `protobuf:"bytes,8,opt,name=DEPRECATED_parseable_location,json=DEPRECATEDParseableLocation" json:"DEPRECATED_parseable_location,omitempty"` + // The rule's class's public by default value. + PublicByDefault *bool `protobuf:"varint,9,opt,name=public_by_default,json=publicByDefault" json:"public_by_default,omitempty"` + // If this rule is of a skylark-defined RuleClass. + IsSkylark *bool `protobuf:"varint,10,opt,name=is_skylark,json=isSkylark" json:"is_skylark,omitempty"` + // List of Skylark aspects that this rule applies. + SkylarkAttributeAspects []*AttributeAspect `protobuf:"bytes,11,rep,name=skylark_attribute_aspects,json=skylarkAttributeAspects" json:"skylark_attribute_aspects,omitempty"` + // Hash encapsulating the behavior of this Skylark rule. Any change to this + // rule's definition that could change its behavior will be reflected here. 
+ SkylarkEnvironmentHashCode *string `protobuf:"bytes,12,opt,name=skylark_environment_hash_code,json=skylarkEnvironmentHashCode" json:"skylark_environment_hash_code,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Rule) Reset() { *m = Rule{} } +func (m *Rule) String() string { return proto.CompactTextString(m) } +func (*Rule) ProtoMessage() {} +func (*Rule) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *Rule) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *Rule) GetRuleClass() string { + if m != nil && m.RuleClass != nil { + return *m.RuleClass + } + return "" +} + +func (m *Rule) GetLocation() string { + if m != nil && m.Location != nil { + return *m.Location + } + return "" +} + +func (m *Rule) GetAttribute() []*Attribute { + if m != nil { + return m.Attribute + } + return nil +} + +func (m *Rule) GetRuleInput() []string { + if m != nil { + return m.RuleInput + } + return nil +} + +func (m *Rule) GetRuleOutput() []string { + if m != nil { + return m.RuleOutput + } + return nil +} + +func (m *Rule) GetDefaultSetting() []string { + if m != nil { + return m.DefaultSetting + } + return nil +} + +func (m *Rule) GetDEPRECATEDParseableLocation() *Location { + if m != nil { + return m.DEPRECATEDParseableLocation + } + return nil +} + +func (m *Rule) GetPublicByDefault() bool { + if m != nil && m.PublicByDefault != nil { + return *m.PublicByDefault + } + return false +} + +func (m *Rule) GetIsSkylark() bool { + if m != nil && m.IsSkylark != nil { + return *m.IsSkylark + } + return false +} + +func (m *Rule) GetSkylarkAttributeAspects() []*AttributeAspect { + if m != nil { + return m.SkylarkAttributeAspects + } + return nil +} + +func (m *Rule) GetSkylarkEnvironmentHashCode() string { + if m != nil && m.SkylarkEnvironmentHashCode != nil { + return *m.SkylarkEnvironmentHashCode + } + return "" +} + +// A pairing of attribute name and a Skylark aspect that is applied to that 
attribute. +type AttributeAspect struct { + AttributeName *string `protobuf:"bytes,1,req,name=attribute_name,json=attributeName" json:"attribute_name,omitempty"` + Aspect *SkylarkAspect `protobuf:"bytes,2,req,name=aspect" json:"aspect,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *AttributeAspect) Reset() { *m = AttributeAspect{} } +func (m *AttributeAspect) String() string { return proto.CompactTextString(m) } +func (*AttributeAspect) ProtoMessage() {} +func (*AttributeAspect) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} } + +func (m *AttributeAspect) GetAttributeName() string { + if m != nil && m.AttributeName != nil { + return *m.AttributeName + } + return "" +} + +func (m *AttributeAspect) GetAspect() *SkylarkAspect { + if m != nil { + return m.Aspect + } + return nil +} + +// Aspect defined in Skylark. +type SkylarkAspect struct { + ExtensionFileLabel *string `protobuf:"bytes,1,req,name=extension_file_label,json=extensionFileLabel" json:"extension_file_label,omitempty"` + ExportedName *string `protobuf:"bytes,2,req,name=exported_name,json=exportedName" json:"exported_name,omitempty"` + Attribute []*Attribute `protobuf:"bytes,3,rep,name=attribute" json:"attribute,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SkylarkAspect) Reset() { *m = SkylarkAspect{} } +func (m *SkylarkAspect) String() string { return proto.CompactTextString(m) } +func (*SkylarkAspect) ProtoMessage() {} +func (*SkylarkAspect) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} } + +func (m *SkylarkAspect) GetExtensionFileLabel() string { + if m != nil && m.ExtensionFileLabel != nil { + return *m.ExtensionFileLabel + } + return "" +} + +func (m *SkylarkAspect) GetExportedName() string { + if m != nil && m.ExportedName != nil { + return *m.ExportedName + } + return "" +} + +func (m *SkylarkAspect) GetAttribute() []*Attribute { + if m != nil { + return m.Attribute + } + return nil +} + +// Summary of all transitive dependencies 
of 'rule,' where each dependent +// rule is included only once in the 'dependency' field. Gives complete +// information to analyze the single build target labeled rule.name, +// including optional location of target in BUILD file. +type RuleSummary struct { + Rule *Rule `protobuf:"bytes,1,req,name=rule" json:"rule,omitempty"` + Dependency []*Rule `protobuf:"bytes,2,rep,name=dependency" json:"dependency,omitempty"` + Location *string `protobuf:"bytes,3,opt,name=location" json:"location,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *RuleSummary) Reset() { *m = RuleSummary{} } +func (m *RuleSummary) String() string { return proto.CompactTextString(m) } +func (*RuleSummary) ProtoMessage() {} +func (*RuleSummary) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} } + +func (m *RuleSummary) GetRule() *Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (m *RuleSummary) GetDependency() []*Rule { + if m != nil { + return m.Dependency + } + return nil +} + +func (m *RuleSummary) GetLocation() string { + if m != nil && m.Location != nil { + return *m.Location + } + return "" +} + +// A package group. Aside from the name, it contains the list of packages +// present in the group (as specified in the BUILD file). +type PackageGroup struct { + // The name of the package group + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + // The list of packages as specified in the BUILD file. Currently this is + // only a list of packages, but some time in the future, there might be + // some type of wildcard mechanism. + ContainedPackage []string `protobuf:"bytes,2,rep,name=contained_package,json=containedPackage" json:"contained_package,omitempty"` + // The list of sub package groups included in this one. 
+ IncludedPackageGroup []string `protobuf:"bytes,3,rep,name=included_package_group,json=includedPackageGroup" json:"included_package_group,omitempty"` + // The location of the target in the BUILD file in a machine-parseable form. + DEPRECATEDParseableLocation *Location `protobuf:"bytes,4,opt,name=DEPRECATED_parseable_location,json=DEPRECATEDParseableLocation" json:"DEPRECATED_parseable_location,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *PackageGroup) Reset() { *m = PackageGroup{} } +func (m *PackageGroup) String() string { return proto.CompactTextString(m) } +func (*PackageGroup) ProtoMessage() {} +func (*PackageGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} } + +func (m *PackageGroup) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *PackageGroup) GetContainedPackage() []string { + if m != nil { + return m.ContainedPackage + } + return nil +} + +func (m *PackageGroup) GetIncludedPackageGroup() []string { + if m != nil { + return m.IncludedPackageGroup + } + return nil +} + +func (m *PackageGroup) GetDEPRECATEDParseableLocation() *Location { + if m != nil { + return m.DEPRECATEDParseableLocation + } + return nil +} + +// An environment group. +type EnvironmentGroup struct { + // The name of the environment group. + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + // The environments that belong to this group (as labels). + Environment []string `protobuf:"bytes,2,rep,name=environment" json:"environment,omitempty"` + // The member environments that rules implicitly support if not otherwise + // specified. 
+ Default []string `protobuf:"bytes,3,rep,name=default" json:"default,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnvironmentGroup) Reset() { *m = EnvironmentGroup{} } +func (m *EnvironmentGroup) String() string { return proto.CompactTextString(m) } +func (*EnvironmentGroup) ProtoMessage() {} +func (*EnvironmentGroup) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} } + +func (m *EnvironmentGroup) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *EnvironmentGroup) GetEnvironment() []string { + if m != nil { + return m.Environment + } + return nil +} + +func (m *EnvironmentGroup) GetDefault() []string { + if m != nil { + return m.Default + } + return nil +} + +// A file that is an input into the build system. +// Next-Id: 10 +type SourceFile struct { + // The name of the source file (a label). + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + // The location of the source file. This is a path with line numbers, not + // a label in the build system. + Location *string `protobuf:"bytes,2,opt,name=location" json:"location,omitempty"` + // The location of the corresponding label in the BUILD file in a + // machine-parseable form. + DEPRECATEDParseableLocation *Location `protobuf:"bytes,7,opt,name=DEPRECATED_parseable_location,json=DEPRECATEDParseableLocation" json:"DEPRECATED_parseable_location,omitempty"` + // Labels of files that are transitively subincluded in this BUILD file. This + // is present only when the SourceFile represents a BUILD file that + // subincludes other files. The subincluded file can be either a Python + // preprocessed build extension or a Skylark file. + Subinclude []string `protobuf:"bytes,3,rep,name=subinclude" json:"subinclude,omitempty"` + // Labels of package groups that are mentioned in the visibility declaration + // for this source file. 
+ PackageGroup []string `protobuf:"bytes,4,rep,name=package_group,json=packageGroup" json:"package_group,omitempty"` + // Labels mentioned in the visibility declaration (including :__pkg__ and + // //visibility: ones) + VisibilityLabel []string `protobuf:"bytes,5,rep,name=visibility_label,json=visibilityLabel" json:"visibility_label,omitempty"` + // The package-level features enabled for this package. Only present if the + // SourceFile represents a BUILD file. + Feature []string `protobuf:"bytes,6,rep,name=feature" json:"feature,omitempty"` + // License attribute for the file. + License *License `protobuf:"bytes,8,opt,name=license" json:"license,omitempty"` + // True if the package contains an error. Only present if the SourceFile + // represents a BUILD file. + PackageContainsErrors *bool `protobuf:"varint,9,opt,name=package_contains_errors,json=packageContainsErrors" json:"package_contains_errors,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SourceFile) Reset() { *m = SourceFile{} } +func (m *SourceFile) String() string { return proto.CompactTextString(m) } +func (*SourceFile) ProtoMessage() {} +func (*SourceFile) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} } + +func (m *SourceFile) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *SourceFile) GetLocation() string { + if m != nil && m.Location != nil { + return *m.Location + } + return "" +} + +func (m *SourceFile) GetDEPRECATEDParseableLocation() *Location { + if m != nil { + return m.DEPRECATEDParseableLocation + } + return nil +} + +func (m *SourceFile) GetSubinclude() []string { + if m != nil { + return m.Subinclude + } + return nil +} + +func (m *SourceFile) GetPackageGroup() []string { + if m != nil { + return m.PackageGroup + } + return nil +} + +func (m *SourceFile) GetVisibilityLabel() []string { + if m != nil { + return m.VisibilityLabel + } + return nil +} + +func (m *SourceFile) GetFeature() []string { + if m != 
nil { + return m.Feature + } + return nil +} + +func (m *SourceFile) GetLicense() *License { + if m != nil { + return m.License + } + return nil +} + +func (m *SourceFile) GetPackageContainsErrors() bool { + if m != nil && m.PackageContainsErrors != nil { + return *m.PackageContainsErrors + } + return false +} + +// A file that is the output of a build rule. +type GeneratedFile struct { + // The name of the generated file (a label). + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + // The label of the target that generates the file. + GeneratingRule *string `protobuf:"bytes,2,req,name=generating_rule,json=generatingRule" json:"generating_rule,omitempty"` + // The path of the output file (not a label). + Location *string `protobuf:"bytes,3,opt,name=location" json:"location,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GeneratedFile) Reset() { *m = GeneratedFile{} } +func (m *GeneratedFile) String() string { return proto.CompactTextString(m) } +func (*GeneratedFile) ProtoMessage() {} +func (*GeneratedFile) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} } + +func (m *GeneratedFile) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *GeneratedFile) GetGeneratingRule() string { + if m != nil && m.GeneratingRule != nil { + return *m.GeneratingRule + } + return "" +} + +func (m *GeneratedFile) GetLocation() string { + if m != nil && m.Location != nil { + return *m.Location + } + return "" +} + +// A target from a blaze query execution. Similar to the Attribute message, +// the Discriminator is used to determine which field contains information. +// For any given type, only one of these can be populated in a single Target. +type Target struct { + // The type of target contained in the message. 
+ Type *Target_Discriminator `protobuf:"varint,1,req,name=type,enum=blaze_query.Target_Discriminator" json:"type,omitempty"` + // If this target represents a rule, the rule is stored here. + Rule *Rule `protobuf:"bytes,2,opt,name=rule" json:"rule,omitempty"` + // A file that is not generated by the build system (version controlled + // or created by the test harness). + SourceFile *SourceFile `protobuf:"bytes,3,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` + // A generated file that is the output of a rule. + GeneratedFile *GeneratedFile `protobuf:"bytes,4,opt,name=generated_file,json=generatedFile" json:"generated_file,omitempty"` + // A package group. + PackageGroup *PackageGroup `protobuf:"bytes,5,opt,name=package_group,json=packageGroup" json:"package_group,omitempty"` + // An environment group. + EnvironmentGroup *EnvironmentGroup `protobuf:"bytes,6,opt,name=environment_group,json=environmentGroup" json:"environment_group,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Target) Reset() { *m = Target{} } +func (m *Target) String() string { return proto.CompactTextString(m) } +func (*Target) ProtoMessage() {} +func (*Target) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} } + +func (m *Target) GetType() Target_Discriminator { + if m != nil && m.Type != nil { + return *m.Type + } + return Target_RULE +} + +func (m *Target) GetRule() *Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (m *Target) GetSourceFile() *SourceFile { + if m != nil { + return m.SourceFile + } + return nil +} + +func (m *Target) GetGeneratedFile() *GeneratedFile { + if m != nil { + return m.GeneratedFile + } + return nil +} + +func (m *Target) GetPackageGroup() *PackageGroup { + if m != nil { + return m.PackageGroup + } + return nil +} + +func (m *Target) GetEnvironmentGroup() *EnvironmentGroup { + if m != nil { + return m.EnvironmentGroup + } + return nil +} + +// Container for all of the blaze query results. 
+type QueryResult struct { + // All of the targets returned by the blaze query. + Target []*Target `protobuf:"bytes,1,rep,name=target" json:"target,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *QueryResult) Reset() { *m = QueryResult{} } +func (m *QueryResult) String() string { return proto.CompactTextString(m) } +func (*QueryResult) ProtoMessage() {} +func (*QueryResult) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} } + +func (m *QueryResult) GetTarget() []*Target { + if m != nil { + return m.Target + } + return nil +} + +// Information about allowed rule classes for a specific attribute of a rule. +type AllowedRuleClassInfo struct { + Policy *AllowedRuleClassInfo_AllowedRuleClasses `protobuf:"varint,1,req,name=policy,enum=blaze_query.AllowedRuleClassInfo_AllowedRuleClasses" json:"policy,omitempty"` + // Rule class names of rules allowed in this attribute, e.g "cc_library", + // "py_binary". Only present if the allowed_rule_classes field is set to + // SPECIFIED. + AllowedRuleClass []string `protobuf:"bytes,2,rep,name=allowed_rule_class,json=allowedRuleClass" json:"allowed_rule_class,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *AllowedRuleClassInfo) Reset() { *m = AllowedRuleClassInfo{} } +func (m *AllowedRuleClassInfo) String() string { return proto.CompactTextString(m) } +func (*AllowedRuleClassInfo) ProtoMessage() {} +func (*AllowedRuleClassInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} } + +func (m *AllowedRuleClassInfo) GetPolicy() AllowedRuleClassInfo_AllowedRuleClasses { + if m != nil && m.Policy != nil { + return *m.Policy + } + return AllowedRuleClassInfo_ANY +} + +func (m *AllowedRuleClassInfo) GetAllowedRuleClass() []string { + if m != nil { + return m.AllowedRuleClass + } + return nil +} + +// This message represents a single attribute of a single rule. +type AttributeDefinition struct { + // Attribute name, i.e. 
"name", "srcs", "deps" + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + Type *Attribute_Discriminator `protobuf:"varint,2,req,name=type,enum=blaze_query.Attribute_Discriminator" json:"type,omitempty"` + Mandatory *bool `protobuf:"varint,3,req,name=mandatory" json:"mandatory,omitempty"` + // Only present for attributes of type LABEL and LABEL_LIST. + AllowedRuleClasses *AllowedRuleClassInfo `protobuf:"bytes,4,opt,name=allowed_rule_classes,json=allowedRuleClasses" json:"allowed_rule_classes,omitempty"` + Documentation *string `protobuf:"bytes,5,opt,name=documentation" json:"documentation,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *AttributeDefinition) Reset() { *m = AttributeDefinition{} } +func (m *AttributeDefinition) String() string { return proto.CompactTextString(m) } +func (*AttributeDefinition) ProtoMessage() {} +func (*AttributeDefinition) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{19} } + +func (m *AttributeDefinition) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *AttributeDefinition) GetType() Attribute_Discriminator { + if m != nil && m.Type != nil { + return *m.Type + } + return Attribute_INTEGER +} + +func (m *AttributeDefinition) GetMandatory() bool { + if m != nil && m.Mandatory != nil { + return *m.Mandatory + } + return false +} + +func (m *AttributeDefinition) GetAllowedRuleClasses() *AllowedRuleClassInfo { + if m != nil { + return m.AllowedRuleClasses + } + return nil +} + +func (m *AttributeDefinition) GetDocumentation() string { + if m != nil && m.Documentation != nil { + return *m.Documentation + } + return "" +} + +type RuleDefinition struct { + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + // Only contains documented attributes + Attribute []*AttributeDefinition `protobuf:"bytes,2,rep,name=attribute" json:"attribute,omitempty"` + Documentation *string `protobuf:"bytes,3,opt,name=documentation" 
json:"documentation,omitempty"` + // Only for build extensions: label to file that defines the extension + Label *string `protobuf:"bytes,4,opt,name=label" json:"label,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *RuleDefinition) Reset() { *m = RuleDefinition{} } +func (m *RuleDefinition) String() string { return proto.CompactTextString(m) } +func (*RuleDefinition) ProtoMessage() {} +func (*RuleDefinition) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{20} } + +func (m *RuleDefinition) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *RuleDefinition) GetAttribute() []*AttributeDefinition { + if m != nil { + return m.Attribute + } + return nil +} + +func (m *RuleDefinition) GetDocumentation() string { + if m != nil && m.Documentation != nil { + return *m.Documentation + } + return "" +} + +func (m *RuleDefinition) GetLabel() string { + if m != nil && m.Label != nil { + return *m.Label + } + return "" +} + +type BuildLanguage struct { + // Only contains documented rule definitions + Rule []*RuleDefinition `protobuf:"bytes,1,rep,name=rule" json:"rule,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *BuildLanguage) Reset() { *m = BuildLanguage{} } +func (m *BuildLanguage) String() string { return proto.CompactTextString(m) } +func (*BuildLanguage) ProtoMessage() {} +func (*BuildLanguage) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{21} } + +func (m *BuildLanguage) GetRule() []*RuleDefinition { + if m != nil { + return m.Rule + } + return nil +} + +type Location struct { + StartOffset *int32 `protobuf:"varint,1,opt,name=start_offset,json=startOffset" json:"start_offset,omitempty"` + StartLine *int32 `protobuf:"varint,2,opt,name=start_line,json=startLine" json:"start_line,omitempty"` + StartColumn *int32 `protobuf:"varint,3,opt,name=start_column,json=startColumn" json:"start_column,omitempty"` + EndOffset *int32 
`protobuf:"varint,4,opt,name=end_offset,json=endOffset" json:"end_offset,omitempty"` + EndLine *int32 `protobuf:"varint,5,opt,name=end_line,json=endLine" json:"end_line,omitempty"` + EndColumn *int32 `protobuf:"varint,6,opt,name=end_column,json=endColumn" json:"end_column,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{22} } + +func (m *Location) GetStartOffset() int32 { + if m != nil && m.StartOffset != nil { + return *m.StartOffset + } + return 0 +} + +func (m *Location) GetStartLine() int32 { + if m != nil && m.StartLine != nil { + return *m.StartLine + } + return 0 +} + +func (m *Location) GetStartColumn() int32 { + if m != nil && m.StartColumn != nil { + return *m.StartColumn + } + return 0 +} + +func (m *Location) GetEndOffset() int32 { + if m != nil && m.EndOffset != nil { + return *m.EndOffset + } + return 0 +} + +func (m *Location) GetEndLine() int32 { + if m != nil && m.EndLine != nil { + return *m.EndLine + } + return 0 +} + +func (m *Location) GetEndColumn() int32 { + if m != nil && m.EndColumn != nil { + return *m.EndColumn + } + return 0 +} + +type MakeVarBinding struct { + Value *string `protobuf:"bytes,1,req,name=value" json:"value,omitempty"` + PlatformSetRegexp *string `protobuf:"bytes,2,req,name=platform_set_regexp,json=platformSetRegexp" json:"platform_set_regexp,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MakeVarBinding) Reset() { *m = MakeVarBinding{} } +func (m *MakeVarBinding) String() string { return proto.CompactTextString(m) } +func (*MakeVarBinding) ProtoMessage() {} +func (*MakeVarBinding) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{23} } + +func (m *MakeVarBinding) GetValue() string { + if m != nil && m.Value != nil { + return *m.Value + } + return "" +} + 
+func (m *MakeVarBinding) GetPlatformSetRegexp() string { + if m != nil && m.PlatformSetRegexp != nil { + return *m.PlatformSetRegexp + } + return "" +} + +type MakeVar struct { + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + Binding []*MakeVarBinding `protobuf:"bytes,2,rep,name=binding" json:"binding,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *MakeVar) Reset() { *m = MakeVar{} } +func (m *MakeVar) String() string { return proto.CompactTextString(m) } +func (*MakeVar) ProtoMessage() {} +func (*MakeVar) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{24} } + +func (m *MakeVar) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *MakeVar) GetBinding() []*MakeVarBinding { + if m != nil { + return m.Binding + } + return nil +} + +type GlobCriteria struct { + // List of includes (or items if this criteria did not come from a glob) + Include []string `protobuf:"bytes,1,rep,name=include" json:"include,omitempty"` + // List of exclude expressions + Exclude []string `protobuf:"bytes,2,rep,name=exclude" json:"exclude,omitempty"` + // Whether this message came from a glob + Glob *bool `protobuf:"varint,3,opt,name=glob" json:"glob,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *GlobCriteria) Reset() { *m = GlobCriteria{} } +func (m *GlobCriteria) String() string { return proto.CompactTextString(m) } +func (*GlobCriteria) ProtoMessage() {} +func (*GlobCriteria) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{25} } + +func (m *GlobCriteria) GetInclude() []string { + if m != nil { + return m.Include + } + return nil +} + +func (m *GlobCriteria) GetExclude() []string { + if m != nil { + return m.Exclude + } + return nil +} + +func (m *GlobCriteria) GetGlob() bool { + if m != nil && m.Glob != nil { + return *m.Glob + } + return false +} + +type Event struct { + Kind *Event_EventKind `protobuf:"varint,1,req,name=kind,enum=blaze_query.Event_EventKind" 
json:"kind,omitempty"` + DEPRECATEDLocation *Location `protobuf:"bytes,2,opt,name=DEPRECATED_location,json=DEPRECATEDLocation" json:"DEPRECATED_location,omitempty"` + Message *string `protobuf:"bytes,3,opt,name=message" json:"message,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Event) Reset() { *m = Event{} } +func (m *Event) String() string { return proto.CompactTextString(m) } +func (*Event) ProtoMessage() {} +func (*Event) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{26} } + +func (m *Event) GetKind() Event_EventKind { + if m != nil && m.Kind != nil { + return *m.Kind + } + return Event_ERROR +} + +func (m *Event) GetDEPRECATEDLocation() *Location { + if m != nil { + return m.DEPRECATEDLocation + } + return nil +} + +func (m *Event) GetMessage() string { + if m != nil && m.Message != nil { + return *m.Message + } + return "" +} + +func init() { + proto.RegisterType((*License)(nil), "blaze_query.License") + proto.RegisterType((*StringDictEntry)(nil), "blaze_query.StringDictEntry") + proto.RegisterType((*LabelDictUnaryEntry)(nil), "blaze_query.LabelDictUnaryEntry") + proto.RegisterType((*LabelListDictEntry)(nil), "blaze_query.LabelListDictEntry") + proto.RegisterType((*LabelKeyedStringDictEntry)(nil), "blaze_query.LabelKeyedStringDictEntry") + proto.RegisterType((*StringListDictEntry)(nil), "blaze_query.StringListDictEntry") + proto.RegisterType((*FilesetEntry)(nil), "blaze_query.FilesetEntry") + proto.RegisterType((*Attribute)(nil), "blaze_query.Attribute") + proto.RegisterType((*Attribute_SelectorEntry)(nil), "blaze_query.Attribute.SelectorEntry") + proto.RegisterType((*Attribute_Selector)(nil), "blaze_query.Attribute.Selector") + proto.RegisterType((*Attribute_SelectorList)(nil), "blaze_query.Attribute.SelectorList") + proto.RegisterType((*Rule)(nil), "blaze_query.Rule") + proto.RegisterType((*AttributeAspect)(nil), "blaze_query.AttributeAspect") + proto.RegisterType((*SkylarkAspect)(nil), "blaze_query.SkylarkAspect") + 
proto.RegisterType((*RuleSummary)(nil), "blaze_query.RuleSummary") + proto.RegisterType((*PackageGroup)(nil), "blaze_query.PackageGroup") + proto.RegisterType((*EnvironmentGroup)(nil), "blaze_query.EnvironmentGroup") + proto.RegisterType((*SourceFile)(nil), "blaze_query.SourceFile") + proto.RegisterType((*GeneratedFile)(nil), "blaze_query.GeneratedFile") + proto.RegisterType((*Target)(nil), "blaze_query.Target") + proto.RegisterType((*QueryResult)(nil), "blaze_query.QueryResult") + proto.RegisterType((*AllowedRuleClassInfo)(nil), "blaze_query.AllowedRuleClassInfo") + proto.RegisterType((*AttributeDefinition)(nil), "blaze_query.AttributeDefinition") + proto.RegisterType((*RuleDefinition)(nil), "blaze_query.RuleDefinition") + proto.RegisterType((*BuildLanguage)(nil), "blaze_query.BuildLanguage") + proto.RegisterType((*Location)(nil), "blaze_query.Location") + proto.RegisterType((*MakeVarBinding)(nil), "blaze_query.MakeVarBinding") + proto.RegisterType((*MakeVar)(nil), "blaze_query.MakeVar") + proto.RegisterType((*GlobCriteria)(nil), "blaze_query.GlobCriteria") + proto.RegisterType((*Event)(nil), "blaze_query.Event") + proto.RegisterEnum("blaze_query.FilesetEntry_SymlinkBehavior", FilesetEntry_SymlinkBehavior_name, FilesetEntry_SymlinkBehavior_value) + proto.RegisterEnum("blaze_query.Attribute_Discriminator", Attribute_Discriminator_name, Attribute_Discriminator_value) + proto.RegisterEnum("blaze_query.Attribute_Tristate", Attribute_Tristate_name, Attribute_Tristate_value) + proto.RegisterEnum("blaze_query.Target_Discriminator", Target_Discriminator_name, Target_Discriminator_value) + proto.RegisterEnum("blaze_query.AllowedRuleClassInfo_AllowedRuleClasses", AllowedRuleClassInfo_AllowedRuleClasses_name, AllowedRuleClassInfo_AllowedRuleClasses_value) + proto.RegisterEnum("blaze_query.Event_EventKind", Event_EventKind_name, Event_EventKind_value) +} + +func init() { proto.RegisterFile("build_proto/build.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 
2621 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x59, 0x49, 0x73, 0xeb, 0xc6, + 0xf1, 0xff, 0x83, 0x3b, 0x9b, 0x1b, 0x34, 0xda, 0xf8, 0x36, 0x9b, 0x86, 0x9f, 0xff, 0xa6, 0x97, + 0x92, 0x6d, 0xe6, 0xd9, 0xe5, 0xd8, 0xc9, 0xab, 0x50, 0x24, 0x24, 0xd3, 0x8f, 0x26, 0x95, 0x21, + 0xf9, 0x1c, 0xe5, 0x82, 0x02, 0x89, 0x11, 0x35, 0x25, 0x10, 0x60, 0x00, 0x50, 0x11, 0x73, 0x4d, + 0xe5, 0xe0, 0x7c, 0x06, 0x1f, 0xf2, 0x2d, 0x72, 0xcd, 0x31, 0x1f, 0x21, 0xa9, 0x5c, 0x72, 0xcc, + 0x87, 0xc8, 0x21, 0x35, 0x0b, 0x48, 0x40, 0xa2, 0x2c, 0xf9, 0x39, 0xb9, 0xa4, 0x72, 0x61, 0x71, + 0x7a, 0xf9, 0x4d, 0xf7, 0x4c, 0x77, 0x4f, 0xcf, 0x00, 0xf6, 0xc7, 0x0b, 0x6a, 0x5b, 0xc6, 0xdc, + 0x73, 0x03, 0xf7, 0x03, 0xfe, 0xff, 0x80, 0xff, 0x47, 0x85, 0xb1, 0x6d, 0xfe, 0x86, 0x18, 0xbf, + 0x5a, 0x10, 0x6f, 0xa9, 0x7d, 0x09, 0xd9, 0x2e, 0x9d, 0x10, 0xc7, 0x27, 0xe8, 0x0d, 0x28, 0xda, + 0xe2, 0xaf, 0x11, 0x2c, 0xe7, 0xa4, 0xaa, 0xd4, 0x92, 0xf5, 0x3c, 0x2e, 0x48, 0xda, 0x70, 0x39, + 0x27, 0xe8, 0x31, 0xe4, 0xc9, 0xd5, 0x84, 0xcc, 0x03, 0xea, 0x3a, 0xd5, 0x04, 0xe7, 0xaf, 0x09, + 0xda, 0x8f, 0xa1, 0x32, 0x08, 0x3c, 0xea, 0x4c, 0xdb, 0x74, 0x12, 0xe8, 0x4e, 0xe0, 0x2d, 0x91, + 0x0a, 0xc9, 0x0b, 0xb2, 0xac, 0x2a, 0xb5, 0x44, 0x3d, 0x8f, 0xd9, 0x5f, 0xb4, 0x03, 0xe9, 0x4b, + 0xd3, 0x5e, 0x90, 0x6a, 0x82, 0xd3, 0xc4, 0x40, 0xfb, 0x29, 0x6c, 0x77, 0xcd, 0x31, 0xb1, 0x99, + 0xe6, 0xc8, 0x31, 0xbd, 0xe5, 0xf7, 0x53, 0xff, 0x09, 0x20, 0xae, 0xde, 0xa5, 0x7e, 0x70, 0xcf, + 0xc9, 0x93, 0x6b, 0xed, 0x16, 0x3c, 0xe0, 0xda, 0x2f, 0xc8, 0x92, 0x58, 0x3f, 0xc0, 0x03, 0xa1, + 0xfa, 0x6a, 0x36, 0xfc, 0x2d, 0x01, 0xc5, 0x23, 0x6a, 0x13, 0x9f, 0x48, 0xc5, 0x3d, 0xc8, 0xf8, + 0xee, 0xc2, 0x9b, 0x10, 0xa9, 0x2b, 0x47, 0xe8, 0x47, 0xb0, 0x6b, 0x11, 0x3f, 0xa0, 0x8e, 0xc9, + 0xd6, 0xdc, 0xb0, 0xa8, 0x47, 0x26, 0x81, 0xeb, 0x2d, 0xa5, 0x35, 0x3b, 0x11, 0x66, 0x3b, 0xe4, + 0xa1, 0x37, 0xa1, 0x74, 0xc6, 0xc0, 0x8d, 0xb9, 0x47, 0x7c, 0xe2, 0x04, 0xd5, 0x6c, 0x4d, 
0xa9, + 0xe7, 0x70, 0x91, 0x13, 0x4f, 0x04, 0x0d, 0x21, 0x48, 0xb1, 0x71, 0x35, 0xc9, 0xed, 0xe2, 0xff, + 0x51, 0x15, 0xb2, 0xe4, 0x6a, 0x62, 0x2f, 0x2c, 0x52, 0x4d, 0x71, 0x72, 0x38, 0x44, 0xbf, 0x04, + 0xd5, 0x5f, 0xce, 0x6c, 0xea, 0x5c, 0x18, 0x63, 0x72, 0x6e, 0x5e, 0x52, 0xd7, 0xab, 0xa6, 0x6b, + 0x4a, 0xbd, 0xdc, 0x78, 0xe7, 0x20, 0x12, 0x60, 0x07, 0x51, 0xa7, 0x0e, 0x06, 0x42, 0xe3, 0x50, + 0x2a, 0x7c, 0x96, 0x6a, 0xf5, 0x4f, 0x4e, 0x71, 0xc5, 0x8f, 0x93, 0x59, 0x24, 0xfa, 0x81, 0x47, + 0xe7, 0xcc, 0xdc, 0x33, 0x7a, 0x55, 0xcd, 0xd4, 0x14, 0x16, 0x89, 0x9c, 0x76, 0xc2, 0x49, 0xda, + 0xfb, 0x50, 0xb9, 0x06, 0x86, 0x72, 0xc0, 0xe1, 0x54, 0x05, 0x55, 0xa0, 0xd0, 0xd6, 0xb1, 0x7e, + 0xa4, 0x63, 0xbd, 0xd7, 0xd2, 0xd5, 0x84, 0xf6, 0x8f, 0x7d, 0xc8, 0x37, 0x83, 0xc0, 0xa3, 0xe3, + 0x45, 0x40, 0x98, 0xa3, 0x8e, 0x39, 0x0b, 0x17, 0x96, 0xff, 0x47, 0xa7, 0xf0, 0xa4, 0xad, 0x9f, + 0x60, 0xbd, 0xd5, 0x1c, 0xea, 0x6d, 0x63, 0x6e, 0x7a, 0x3e, 0x31, 0xc7, 0x36, 0x31, 0x6c, 0x77, + 0xc2, 0xd7, 0xb2, 0x5a, 0xac, 0x29, 0xf5, 0x42, 0x63, 0x37, 0xe6, 0x5b, 0x57, 0x32, 0xf1, 0xa3, + 0xb5, 0xee, 0x49, 0xa8, 0x1a, 0x32, 0xd1, 0x47, 0xb0, 0x43, 0xae, 0xe6, 0x36, 0x9d, 0xd0, 0xc0, + 0x5e, 0x1a, 0xfe, 0x9c, 0x4c, 0xe8, 0x19, 0x25, 0x56, 0xb5, 0xc4, 0xf7, 0x60, 0x7b, 0xcd, 0x1b, + 0x84, 0x2c, 0x16, 0x23, 0x8e, 0x6b, 0x91, 0x79, 0x75, 0x87, 0xcb, 0x88, 0x01, 0xfa, 0x14, 0x52, + 0x3c, 0x31, 0xd9, 0x4e, 0x97, 0x1b, 0x4f, 0x63, 0xa6, 0xac, 0xbc, 0x3b, 0x68, 0x53, 0x7f, 0xe2, + 0xd1, 0x19, 0x8b, 0x01, 0xd7, 0xc3, 0x5c, 0x03, 0x3d, 0x82, 0x3c, 0x75, 0x02, 0x43, 0xc4, 0x5d, + 0xb2, 0xa6, 0xd4, 0xd3, 0x38, 0x47, 0x9d, 0xe0, 0x25, 0x1b, 0x87, 0xab, 0xed, 0x4c, 0x25, 0x3f, + 0xbd, 0x5e, 0x6d, 0x67, 0x2a, 0x44, 0xde, 0x84, 0xd2, 0xd8, 0x75, 0x6d, 0x62, 0x3a, 0x52, 0xa6, + 0x2c, 0xe2, 0x47, 0x12, 0x85, 0xd0, 0x11, 0x94, 0x03, 0x8f, 0xfa, 0x81, 0x19, 0x10, 0x29, 0x55, + 0xe1, 0xf1, 0xf0, 0xfa, 0x2d, 0x86, 0x0e, 0xa5, 0x30, 0x2e, 0x85, 0x6a, 0x02, 0xe7, 0x5d, 0xd8, + 0x92, 0xf6, 0xd8, 0xd4, 0x0f, 
0x8d, 0xce, 0xf0, 0xe8, 0xab, 0xf8, 0xab, 0x14, 0x13, 0xb2, 0x07, + 0x90, 0x95, 0xf5, 0x89, 0x87, 0x74, 0xa1, 0xb1, 0x13, 0xdf, 0x20, 0xc1, 0xc3, 0xa1, 0x10, 0xfa, + 0x62, 0x85, 0x6d, 0xd1, 0x49, 0x88, 0x9d, 0xab, 0x25, 0xeb, 0x85, 0xc6, 0xe3, 0x98, 0xe6, 0xb5, + 0x32, 0x10, 0xce, 0xcc, 0x08, 0x62, 0xe6, 0x63, 0x40, 0x67, 0x22, 0xb4, 0xa3, 0x66, 0xe6, 0x39, + 0xd4, 0x83, 0x5b, 0x33, 0x00, 0xab, 0x52, 0x69, 0xed, 0x02, 0x86, 0x5d, 0x9b, 0x55, 0x1f, 0x01, + 0x13, 0x31, 0x0b, 0x38, 0x56, 0x7c, 0xf5, 0x6e, 0x56, 0x39, 0x8c, 0xec, 0x28, 0x4d, 0x60, 0x8e, + 0x60, 0x2f, 0xba, 0x84, 0x11, 0xd0, 0x02, 0x07, 0xad, 0x6d, 0xf0, 0x35, 0x8e, 0xba, 0xed, 0xc7, + 0x88, 0x02, 0xf6, 0x39, 0x94, 0xa6, 0xb6, 0x3b, 0x36, 0x26, 0x1e, 0x0d, 0x88, 0x47, 0xcd, 0xaa, + 0xba, 0xc1, 0xdd, 0x63, 0xdb, 0x1d, 0xb7, 0xa4, 0x00, 0x2e, 0x4e, 0x23, 0x23, 0xf4, 0x14, 0xca, + 0x2c, 0x0c, 0x23, 0xeb, 0xb5, 0x55, 0x4b, 0xd6, 0xd3, 0xb8, 0x48, 0x9d, 0xc8, 0x82, 0x8c, 0x60, + 0x4f, 0x2c, 0x08, 0x37, 0x7b, 0xc1, 0x4e, 0x03, 0x29, 0xbd, 0xbd, 0xc1, 0xf8, 0x0d, 0xc7, 0x06, + 0xde, 0xb6, 0x63, 0x44, 0x01, 0x7b, 0x0e, 0x4f, 0x04, 0xec, 0x05, 0x2b, 0xf3, 0xc6, 0xcd, 0x30, + 0xd8, 0xe3, 0xe8, 0xff, 0x7f, 0x13, 0x7d, 0xd3, 0xb9, 0x80, 0x1f, 0xd8, 0x1b, 0x58, 0x62, 0xa6, + 0x2f, 0xa0, 0xe4, 0x13, 0x9b, 0x97, 0x5e, 0xee, 0x6b, 0x75, 0x97, 0x87, 0xe6, 0x9b, 0xb7, 0xe4, + 0xc1, 0x40, 0xca, 0xb2, 0x15, 0xc0, 0x45, 0x3f, 0x32, 0x42, 0x5f, 0x82, 0x16, 0xa9, 0x4a, 0x51, + 0x93, 0xa3, 0xcb, 0x82, 0x6a, 0xc9, 0x7a, 0x11, 0xbf, 0xb6, 0x96, 0x5c, 0x1b, 0xb4, 0xf6, 0xff, + 0xe1, 0x37, 0x39, 0x28, 0x85, 0x53, 0x89, 0x23, 0x66, 0x07, 0xd2, 0xdc, 0x89, 0xaa, 0xc2, 0x33, + 0x5e, 0x0c, 0x50, 0x1d, 0x54, 0xea, 0x1b, 0x16, 0x39, 0x33, 0x17, 0x76, 0xb8, 0x34, 0x2a, 0x4f, + 0xf7, 0x32, 0xf5, 0xdb, 0x82, 0x2c, 0xfc, 0x8c, 0x55, 0x95, 0xc4, 0x1d, 0x55, 0x25, 0x79, 0x8f, + 0xaa, 0x92, 0xba, 0x57, 0x55, 0x49, 0xff, 0xaf, 0xaa, 0xfc, 0x77, 0x57, 0x95, 0xe2, 0x0f, 0xad, + 0x2a, 0xa5, 0xef, 0x55, 0x55, 0x2a, 0xff, 0xd1, 0xaa, 0xb2, 0xf5, 
0xef, 0xaa, 0x2a, 0xf7, 0xab, + 0x05, 0xe5, 0x7b, 0xd5, 0x82, 0x6f, 0x15, 0xc8, 0x85, 0xb5, 0x00, 0x3d, 0x87, 0x2c, 0x71, 0x02, + 0x8f, 0x12, 0x9f, 0xb7, 0xfc, 0x85, 0x5b, 0x3b, 0x8b, 0x58, 0xf5, 0xc0, 0xa1, 0x12, 0xcb, 0xac, + 0x73, 0xf3, 0x7a, 0xc5, 0x48, 0xf0, 0x54, 0xae, 0x9c, 0x9b, 0xf1, 0x92, 0xf1, 0x14, 0xca, 0x8e, + 0x6b, 0xcc, 0xcc, 0x60, 0x72, 0x6e, 0x10, 0xcf, 0x73, 0x3d, 0x59, 0x17, 0x8a, 0x8e, 0xfb, 0x15, + 0x23, 0xea, 0x8c, 0xf6, 0xf0, 0x77, 0x0a, 0x14, 0xa3, 0x55, 0x71, 0xd5, 0xf9, 0x28, 0x3c, 0xf5, + 0xbf, 0x4f, 0xe7, 0xf3, 0x39, 0xe4, 0x88, 0x4d, 0x66, 0xc4, 0x09, 0x7c, 0xde, 0x70, 0x17, 0x6e, + 0x2d, 0x1c, 0xe1, 0x84, 0x78, 0xa5, 0xa0, 0xfd, 0x3e, 0x09, 0xa5, 0x18, 0x28, 0x2a, 0x40, 0xb6, + 0xd3, 0x1b, 0xea, 0xc7, 0x3a, 0x56, 0x15, 0x04, 0x90, 0x19, 0x0c, 0x71, 0xa7, 0x77, 0xac, 0x26, + 0x50, 0x1e, 0xd2, 0xdd, 0xe6, 0xa1, 0xde, 0x55, 0x93, 0x8c, 0xdc, 0x1f, 0x0d, 0x4f, 0x46, 0x43, + 0x35, 0xc5, 0x3a, 0x51, 0x21, 0x62, 0x74, 0x3b, 0x83, 0xa1, 0x9a, 0x46, 0x65, 0x00, 0x2e, 0x27, + 0xc6, 0x19, 0x26, 0x20, 0x84, 0x05, 0x21, 0x8b, 0x76, 0x40, 0x6d, 0x77, 0x98, 0xce, 0xe1, 0x68, + 0xd8, 0xe9, 0xf7, 0x8c, 0x81, 0x3e, 0x54, 0x73, 0x6c, 0xde, 0x6e, 0xa7, 0xa5, 0xf7, 0x06, 0xba, + 0x9a, 0x8f, 0x80, 0xb6, 0x3b, 0xad, 0xa1, 0x0a, 0x68, 0x0f, 0xd0, 0x51, 0xa7, 0xab, 0x0f, 0xf4, + 0xa1, 0xa1, 0xf7, 0x86, 0xf8, 0x54, 0x60, 0x15, 0xd0, 0x36, 0x54, 0xd6, 0x93, 0x09, 0xe1, 0x22, + 0x9b, 0x20, 0x62, 0x92, 0xa0, 0x96, 0xd8, 0x04, 0x87, 0xfd, 0x7e, 0x57, 0x6f, 0xf6, 0xd4, 0x32, + 0x2a, 0x42, 0x6e, 0x88, 0x3b, 0x83, 0x61, 0x73, 0xa8, 0xab, 0x15, 0xa4, 0x42, 0x51, 0xfa, 0x2c, + 0x70, 0x55, 0x26, 0x3c, 0xea, 0xbd, 0xe8, 0xf5, 0xbf, 0xee, 0xa9, 0x88, 0xe1, 0x89, 0x49, 0x18, + 0x92, 0x31, 0xea, 0x35, 0xf1, 0xa9, 0xba, 0x8d, 0xb6, 0xa0, 0x34, 0xd0, 0xbb, 0x7a, 0x6b, 0xd8, + 0x97, 0x5a, 0x3b, 0xe8, 0x11, 0xec, 0x0b, 0xc1, 0x17, 0xfa, 0xa9, 0xde, 0x36, 0xa2, 0x2e, 0xec, + 0xa2, 0x1a, 0x3c, 0x8e, 0x44, 0x77, 0x84, 0x27, 0x11, 0xb7, 0xb4, 0xb7, 0x20, 0x17, 0xd6, 0x76, + 0x94, 
0x81, 0x44, 0xaf, 0xaf, 0xfe, 0x1f, 0xca, 0x42, 0xf2, 0x54, 0x1f, 0xa8, 0x0a, 0xeb, 0xfd, + 0x9b, 0xa3, 0x61, 0x5f, 0x4d, 0x68, 0x7f, 0x4c, 0x41, 0x0a, 0x2f, 0xec, 0xcd, 0x5d, 0xfe, 0x13, + 0x00, 0x6f, 0x61, 0x13, 0x63, 0x62, 0x9b, 0xbe, 0x2f, 0x6f, 0x4c, 0x79, 0x46, 0x69, 0x31, 0x02, + 0x7a, 0x08, 0xb9, 0x55, 0xbf, 0x2f, 0xe2, 0x72, 0x35, 0x46, 0xcf, 0x20, 0x6f, 0x86, 0xb1, 0xc2, + 0xef, 0x42, 0x85, 0xc6, 0xde, 0xe6, 0x48, 0xc2, 0x6b, 0xc1, 0xd5, 0x84, 0xd4, 0x99, 0x2f, 0x82, + 0x6a, 0x5a, 0xdc, 0x98, 0x19, 0xa5, 0xc3, 0x08, 0xe8, 0x75, 0x28, 0x70, 0xb6, 0xbb, 0x08, 0x18, + 0x5f, 0x1c, 0x47, 0x5c, 0xa3, 0xcf, 0x29, 0xe8, 0x6d, 0xa8, 0x84, 0x79, 0xe5, 0x93, 0x20, 0xa0, + 0xce, 0xb4, 0x9a, 0xe5, 0x42, 0x65, 0x49, 0x1e, 0x08, 0xea, 0xdd, 0xf7, 0x97, 0xdc, 0x2b, 0xdf, + 0x5f, 0xde, 0x85, 0xad, 0xf9, 0x62, 0x6c, 0xd3, 0x89, 0x31, 0x5e, 0x86, 0x59, 0x5e, 0xcd, 0x8b, + 0xfc, 0x16, 0x8c, 0xc3, 0xa5, 0x4c, 0x72, 0xe6, 0x2f, 0xf5, 0x0d, 0xff, 0x62, 0x69, 0x9b, 0xde, + 0x45, 0x15, 0xb8, 0x50, 0x9e, 0xfa, 0x03, 0x41, 0x40, 0xbf, 0x80, 0x07, 0x92, 0x67, 0xac, 0xd6, + 0xc8, 0x30, 0xd9, 0x95, 0x28, 0xf0, 0xe5, 0x21, 0xf2, 0x78, 0xf3, 0xa2, 0x36, 0xb9, 0x10, 0xde, + 0x97, 0xea, 0xd7, 0xe8, 0x3e, 0x6a, 0xc2, 0x93, 0x10, 0x99, 0x38, 0x97, 0xd4, 0x73, 0x1d, 0x96, + 0xc2, 0xc6, 0xb9, 0xe9, 0x9f, 0x1b, 0x13, 0xd7, 0x22, 0xfc, 0xfe, 0x96, 0xc7, 0x0f, 0xa5, 0x90, + 0xbe, 0x96, 0xf9, 0xc2, 0xf4, 0xcf, 0x5b, 0xae, 0x45, 0x34, 0x1b, 0x2a, 0xd7, 0x60, 0xd1, 0x5b, + 0x50, 0x5e, 0xdb, 0x19, 0x89, 0xa6, 0xd2, 0x8a, 0xda, 0x63, 0x61, 0xd5, 0x80, 0x8c, 0x70, 0x82, + 0x87, 0x54, 0xa1, 0xf1, 0x30, 0x7e, 0x10, 0x4a, 0x93, 0x85, 0x07, 0x52, 0x52, 0xfb, 0x56, 0x81, + 0x52, 0x8c, 0x83, 0x3e, 0x64, 0xf7, 0xc4, 0x80, 0x38, 0x3e, 0xbb, 0xd7, 0xb3, 0x03, 0xdd, 0x08, + 0xbb, 0x33, 0x36, 0x25, 0x5a, 0xf1, 0xd8, 0xc9, 0xcf, 0x4f, 0x10, 0xd6, 0x40, 0x91, 0xab, 0xb9, + 0xeb, 0x05, 0xc4, 0x12, 0xd6, 0x89, 0x88, 0x2e, 0x86, 0x44, 0x6e, 0x5c, 0x2c, 0x70, 0x93, 0xf7, + 0x0c, 0x5c, 0xed, 0xb7, 0x0a, 0x14, 0x58, 
0x1a, 0x0d, 0x16, 0xb3, 0x99, 0xe9, 0x2d, 0xd1, 0x5b, + 0x90, 0x62, 0x61, 0xc9, 0x8d, 0x29, 0x34, 0xb6, 0x62, 0x00, 0x4c, 0x0e, 0x73, 0x36, 0xfa, 0x08, + 0xc0, 0x22, 0x73, 0xe2, 0x58, 0xc4, 0x99, 0x2c, 0x65, 0xc1, 0xdd, 0x20, 0x1c, 0x11, 0xfa, 0xae, + 0xa4, 0xd3, 0xfe, 0xae, 0x40, 0xf1, 0xc4, 0x9c, 0x5c, 0x98, 0x53, 0x72, 0xec, 0xb9, 0x8b, 0xf9, + 0xc6, 0xa4, 0x7e, 0x0f, 0xb6, 0x26, 0xae, 0x13, 0x98, 0xd4, 0x21, 0x96, 0x31, 0x17, 0xd2, 0xf2, + 0x71, 0x45, 0x5d, 0x31, 0x24, 0x0a, 0x7a, 0x06, 0x7b, 0xd4, 0xe1, 0x2f, 0x18, 0x2b, 0x59, 0x63, + 0xca, 0xa0, 0xe5, 0xb3, 0xc7, 0x4e, 0xc8, 0x8d, 0x4d, 0x7b, 0x67, 0x76, 0xa5, 0x5e, 0x35, 0xbb, + 0xb4, 0x31, 0xa8, 0x91, 0x60, 0xbc, 0xdd, 0xcb, 0x1a, 0x14, 0x22, 0x81, 0x2d, 0xfd, 0x8b, 0x92, + 0x50, 0x15, 0xb2, 0x61, 0x76, 0x0a, 0x5f, 0xc2, 0xa1, 0xf6, 0x4d, 0x12, 0x60, 0xc0, 0x9f, 0x8f, + 0x58, 0xec, 0x6c, 0x84, 0x8f, 0xee, 0x42, 0xe2, 0x5a, 0xe9, 0xbb, 0xd3, 0xfb, 0xec, 0x2b, 0xd7, + 0x96, 0xd7, 0x00, 0xfc, 0xc5, 0x58, 0xae, 0xb9, 0x34, 0x3b, 0x42, 0x61, 0x11, 0x1e, 0xdf, 0x25, + 0xf1, 0x0a, 0x55, 0x9c, 0x47, 0x77, 0xe7, 0x1d, 0x50, 0x2f, 0xa9, 0x4f, 0xc7, 0xd4, 0xa6, 0xc1, + 0x52, 0x26, 0x8d, 0x28, 0xb5, 0x95, 0x35, 0x5d, 0x64, 0x4c, 0x15, 0xb2, 0x67, 0xc4, 0x0c, 0x16, + 0x5e, 0xd8, 0xfb, 0x87, 0xc3, 0x68, 0xcf, 0x9f, 0xbb, 0x4f, 0xcf, 0xff, 0x09, 0xec, 0x87, 0x96, + 0xc9, 0x20, 0xf3, 0x45, 0x47, 0xe3, 0xcb, 0xda, 0xb8, 0x2b, 0xd9, 0x2d, 0xc9, 0xe5, 0xad, 0x8d, + 0xaf, 0x9d, 0x43, 0xe9, 0x98, 0x38, 0xc4, 0x33, 0x03, 0x62, 0xdd, 0xba, 0x1b, 0x6f, 0x43, 0x65, + 0x2a, 0x84, 0x58, 0x93, 0xc7, 0x13, 0x4f, 0xa4, 0x76, 0x79, 0x4d, 0xe6, 0x87, 0xdc, 0x77, 0x25, + 0xcf, 0x5f, 0x93, 0x90, 0x19, 0x9a, 0xde, 0x94, 0x04, 0xe8, 0xe3, 0x55, 0xff, 0x94, 0xa8, 0x97, + 0x1b, 0x6f, 0xc4, 0x3c, 0x13, 0x22, 0x1b, 0x9b, 0xa7, 0x30, 0xe9, 0x13, 0x7c, 0x41, 0x6e, 0x4d, + 0xfa, 0x4f, 0xa1, 0x20, 0x1e, 0x27, 0x0d, 0xf9, 0x7e, 0xc8, 0xa4, 0xf7, 0xe3, 0x35, 0x70, 0x15, + 0x7d, 0x18, 0xfc, 0x75, 0x24, 0x36, 0x21, 0x74, 0x88, 0x58, 0x42, 0x59, 0x24, 
0x52, 0xbc, 0x80, + 0xc6, 0xd6, 0x0b, 0x97, 0xa6, 0xb1, 0xe5, 0x7b, 0x7e, 0x3d, 0x42, 0xd2, 0x1c, 0x21, 0x7e, 0x7b, + 0x88, 0x26, 0xf3, 0xb5, 0xe0, 0xf9, 0x12, 0xb6, 0xa2, 0x07, 0x86, 0xc0, 0xc8, 0x70, 0x8c, 0x27, + 0x31, 0x8c, 0xeb, 0x59, 0x8a, 0x55, 0x72, 0x8d, 0xc2, 0xf6, 0x36, 0xde, 0x2e, 0xe6, 0x20, 0x85, + 0x47, 0x5d, 0x5d, 0x3c, 0x49, 0x0e, 0xfa, 0x23, 0xdc, 0xd2, 0x0d, 0xd6, 0xa9, 0xa9, 0x09, 0x84, + 0xa0, 0x7c, 0xac, 0xf7, 0x74, 0xcc, 0x73, 0x8a, 0xd3, 0x92, 0xac, 0x69, 0x3a, 0x69, 0xb6, 0x5e, + 0x34, 0x8f, 0x75, 0xe3, 0x18, 0xf7, 0x47, 0x27, 0x6a, 0x0a, 0xed, 0xc2, 0x96, 0xde, 0x7b, 0xd9, + 0xc1, 0xfd, 0xde, 0x57, 0x7a, 0x6f, 0x28, 0xc9, 0x69, 0xed, 0x33, 0x28, 0xfc, 0x9c, 0x59, 0x85, + 0x89, 0xcf, 0x8e, 0xdd, 0xf7, 0x20, 0x13, 0xf0, 0x6d, 0x94, 0x1d, 0xfc, 0xf6, 0x86, 0x1d, 0xc6, + 0x52, 0x44, 0xfb, 0x93, 0x02, 0x3b, 0x4d, 0xdb, 0x76, 0x7f, 0x4d, 0x2c, 0x1c, 0xb6, 0x3e, 0x1d, + 0xe7, 0xcc, 0x45, 0x5d, 0xc8, 0xcc, 0x5d, 0x9b, 0x4e, 0x96, 0x32, 0x4e, 0x9e, 0xc5, 0x8f, 0x89, + 0x0d, 0x2a, 0x37, 0x88, 0xc4, 0xc7, 0x12, 0x03, 0xbd, 0x0f, 0xc8, 0x14, 0x5c, 0x23, 0xd6, 0x73, + 0xf1, 0xba, 0x6c, 0x5e, 0xd3, 0xd3, 0xde, 0x07, 0x74, 0x13, 0x8b, 0xf5, 0x77, 0xcd, 0xde, 0xa9, + 0xaa, 0xa0, 0x12, 0xe4, 0x07, 0x27, 0x7a, 0xab, 0x73, 0xd4, 0xd1, 0xdb, 0x6a, 0x42, 0xfb, 0xa7, + 0x02, 0xdb, 0xab, 0x63, 0xab, 0x4d, 0xce, 0xa8, 0x43, 0x79, 0x39, 0xd9, 0x94, 0x4b, 0xaf, 0xfe, + 0x6a, 0xfa, 0x18, 0xf2, 0x33, 0xd3, 0xb1, 0x4c, 0xfe, 0xbc, 0x9e, 0xac, 0x25, 0x58, 0x2f, 0xb3, + 0x22, 0xa0, 0x01, 0xec, 0xdc, 0xf4, 0x8f, 0xf8, 0x32, 0x82, 0xdf, 0xb8, 0x73, 0xed, 0x30, 0x32, + 0x6f, 0x3a, 0xfc, 0x14, 0x4a, 0x96, 0x3b, 0x59, 0xb0, 0x90, 0x12, 0x49, 0x2d, 0x1e, 0x63, 0xe3, + 0x44, 0xed, 0x0f, 0x0a, 0x94, 0x99, 0xd6, 0x1d, 0x9e, 0x3f, 0x8f, 0x9e, 0xfc, 0x89, 0x0d, 0xb7, + 0xdc, 0x0d, 0x4b, 0x18, 0x6d, 0x5e, 0x6f, 0x18, 0x93, 0xdc, 0x60, 0xcc, 0xfa, 0x15, 0x29, 0x15, + 0x79, 0x45, 0xd2, 0x7e, 0x06, 0xa5, 0xc3, 0x05, 0xb5, 0xad, 0xae, 0xe9, 0x4c, 0x17, 0xec, 0xe0, + 0xfd, 0x60, 0xd5, 
0x40, 0x30, 0x3b, 0x1e, 0xdd, 0xa8, 0x25, 0x11, 0x13, 0xb8, 0xa0, 0xf6, 0x67, + 0x05, 0x72, 0xab, 0x73, 0x82, 0xbf, 0x26, 0x99, 0x5e, 0x60, 0xb8, 0x67, 0x67, 0x3e, 0x0f, 0x73, + 0xa5, 0x9e, 0xc6, 0x05, 0x4e, 0xeb, 0x73, 0x12, 0x6b, 0x3d, 0x85, 0x88, 0x4d, 0x9d, 0xf0, 0x39, + 0x2a, 0xcf, 0x29, 0x5d, 0xea, 0x90, 0x35, 0xc2, 0xc4, 0xb5, 0x17, 0x33, 0x47, 0xbe, 0x82, 0x0b, + 0x84, 0x16, 0x27, 0x31, 0x04, 0xe2, 0x58, 0xe1, 0x14, 0x29, 0x81, 0x40, 0x1c, 0x4b, 0x4e, 0xf0, + 0x00, 0x72, 0x8c, 0xcd, 0xe1, 0xd3, 0x9c, 0x99, 0x25, 0x8e, 0xc5, 0xc1, 0xa5, 0xa6, 0x84, 0xce, + 0xac, 0x34, 0x05, 0xb0, 0xf6, 0x12, 0xca, 0x5f, 0x99, 0x17, 0xe4, 0xa5, 0xe9, 0x1d, 0x52, 0xc7, + 0x62, 0xed, 0xfa, 0xea, 0x23, 0x90, 0x12, 0xf9, 0x86, 0x84, 0x0e, 0x60, 0x7b, 0x6e, 0x9b, 0xc1, + 0x99, 0xeb, 0xcd, 0x58, 0xbb, 0x6f, 0x78, 0x64, 0x4a, 0xae, 0xe6, 0xb2, 0xf4, 0x6f, 0x85, 0xac, + 0x01, 0x09, 0x30, 0x67, 0x68, 0x43, 0xc8, 0x4a, 0xdc, 0x8d, 0xfb, 0xff, 0x31, 0x64, 0xc7, 0x62, + 0x3e, 0xb9, 0xfb, 0xf1, 0x55, 0x8f, 0x9b, 0x84, 0x43, 0x59, 0xed, 0x25, 0x14, 0xa3, 0xaf, 0x2d, + 0xec, 0xcc, 0x0c, 0x0f, 0x68, 0xf1, 0x49, 0x30, 0x1c, 0x46, 0xbf, 0x0e, 0x25, 0xe2, 0x5f, 0x87, + 0x10, 0xa4, 0xa6, 0xb6, 0x3b, 0xe6, 0xab, 0x9c, 0xc3, 0xfc, 0xbf, 0xf6, 0x17, 0x05, 0xd2, 0xfa, + 0x25, 0xeb, 0x54, 0x3e, 0x84, 0xd4, 0x05, 0x75, 0x2c, 0x59, 0x66, 0xe2, 0x1d, 0x3f, 0x97, 0x10, + 0xbf, 0x2f, 0xa8, 0x63, 0x61, 0x2e, 0x89, 0x8e, 0x60, 0x3b, 0xd2, 0x82, 0xc4, 0x3a, 0x95, 0x5b, + 0x1b, 0x0f, 0xb4, 0xd6, 0x58, 0xc5, 0x51, 0x15, 0xb2, 0x33, 0xe2, 0xfb, 0xac, 0x43, 0x14, 0xc1, + 0x1c, 0x0e, 0xb5, 0xcf, 0x21, 0xbf, 0x9a, 0x94, 0xdd, 0xe6, 0x75, 0x8c, 0xfb, 0xec, 0x92, 0x5f, + 0x80, 0xec, 0xd7, 0x4d, 0xdc, 0x13, 0xb7, 0xfc, 0x1c, 0xa4, 0x3a, 0xbd, 0xa3, 0xbe, 0x9a, 0x64, + 0x57, 0xe4, 0x13, 0xdc, 0x3f, 0xc6, 0xfa, 0x60, 0xa0, 0xa6, 0x0e, 0x3f, 0x81, 0x67, 0x13, 0x77, + 0x76, 0x30, 0x75, 0xdd, 0xa9, 0x4d, 0x0e, 0x2c, 0x72, 0x19, 0xb8, 0xae, 0xed, 0x1f, 0x88, 0xaf, + 0xae, 0x36, 0x1d, 0x1f, 0x70, 0xd3, 0x1a, 0xe2, 0x03, 
0xac, 0xf8, 0x6d, 0x98, 0x73, 0xfa, 0xaf, + 0x00, 0x00, 0x00, 0xff, 0xff, 0x1e, 0xc0, 0x9c, 0xa4, 0xa4, 0x1d, 0x00, 0x00, +} diff --git a/vendor/github.com/bazelbuild/buildtools/buildifier/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/buildifier/BUILD.bazel new file mode 100644 index 00000000000..3a8141b5ce9 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/buildifier/BUILD.bazel @@ -0,0 +1,13 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary") + +go_binary( + name = "buildifier", + srcs = ["buildifier.go"], + linkstamp = "main", + visibility = ["//visibility:public"], + deps = [ + "//build:go_default_library", + "//differ:go_default_library", + "//tables:go_default_library", + ], +) diff --git a/vendor/github.com/bazelbuild/buildtools/buildifier/README.md b/vendor/github.com/bazelbuild/buildtools/buildifier/README.md new file mode 100644 index 00000000000..c52d2882a7e --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/buildifier/README.md @@ -0,0 +1,18 @@ +# Buildifier + +buildifier is a tool for formatting bazel BUILD files with a standard convention. + +## Setup + +Build the tool: +* Checkout the repo and then either via `go install` or `bazel build //buildifier` +* If you already have 'go' installed, then build a binary via: + +`go get github.com/bazelbuild/buildtools/buildifier` + +## Usage + +Use buildifier to create standardized formatting for BUILD files in the +same way that clang-format is used for source files. + +`$ buildifier -showlog -mode=check $(find . -type f \( -iname BUILD -or -iname BUILD.bazel \))` diff --git a/vendor/github.com/bazelbuild/buildtools/buildifier/buildifier.go b/vendor/github.com/bazelbuild/buildtools/buildifier/buildifier.go new file mode 100644 index 00000000000..85575491b5c --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/buildifier/buildifier.go @@ -0,0 +1,374 @@ +/* +Copyright 2016 Google Inc. 
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Buildifier, a tool to parse and format BUILD files. +package main + +import ( + "bytes" + "flag" + "fmt" + "io/ioutil" + "os" + "runtime" + "sort" + "strings" + + "github.com/bazelbuild/buildtools/build" + "github.com/bazelbuild/buildtools/differ" + "github.com/bazelbuild/buildtools/tables" +) + +var buildifierVersion = "redacted" +var buildScmRevision = "redacted" + +var ( + // Undocumented; for debugging. 
+ showlog = flag.Bool("showlog", false, "show log in check mode") + + vflag = flag.Bool("v", false, "print verbose information on standard error") + dflag = flag.Bool("d", false, "alias for -mode=diff") + mode = flag.String("mode", "", "formatting mode: check, diff, or fix (default fix)") + path = flag.String("path", "", "assume BUILD file has this path relative to the workspace directory") + tablesPath = flag.String("tables", "", "path to JSON file with custom table definitions which will replace the built-in tables") + addTablesPath = flag.String("add_tables", "", "path to JSON file with custom table definitions which will be merged with the built-in tables") + version = flag.Bool("version", false, "Print the version of buildifier") + + // Debug flags passed through to rewrite.go + allowSort = stringList("allowsort", "additional sort contexts to treat as safe") + disable = stringList("buildifier_disable", "list of buildifier rewrites to disable") + + // Experimental flags + formatBzlFiles = flag.Bool("format_bzl", false, "format bzl-specific blocks (experimental)") +) + +func stringList(name, help string) func() []string { + f := flag.String(name, "", help) + return func() []string { + return strings.Split(*f, ",") + } +} + +func usage() { + fmt.Fprintf(os.Stderr, `usage: buildifier [-d] [-v] [-mode=mode] [-path=path] [files...] + +Buildifier applies a standard formatting to the named BUILD files. +The mode flag selects the processing: check, diff, fix, or print_if_changed. +In check mode, buildifier prints a list of files that need reformatting. +In diff mode, buildifier shows the diffs that it would make. +In fix mode, buildifier updates the files that need reformatting and, +if the -v flag is given, prints their names to standard error. +In print_if_changed mode, buildifier shows the file contents it would write. +The default mode is fix. -d is an alias for -mode=diff. + +If no files are listed, buildifier reads a BUILD file from standard input. 
In +fix mode, it writes the reformatted BUILD file to standard output, even if no +changes are necessary. + +Buildifier's reformatting depends in part on the path to the file relative +to the workspace directory. Normally buildifier deduces that path from the +file names given, but the path can be given explicitly with the -path +argument. This is especially useful when reformatting standard input, +or in scripts that reformat a temporary copy of a file. +`) + os.Exit(2) +} + +func main() { + flag.Usage = usage + flag.Parse() + args := flag.Args() + + if *version { + fmt.Printf("buildifier version: %s \n", buildifierVersion) + fmt.Printf("buildifier scm revision: %s \n", buildScmRevision) + + if len(args) == 0 { + os.Exit(0) + } + } + + // Pass down debug flags into build package + build.DisableRewrites = disable() + build.AllowSort = allowSort() + + tables.FormatBzlFiles = *formatBzlFiles + + if *dflag { + if *mode != "" { + fmt.Fprintf(os.Stderr, "buildifier: cannot specify both -d and -mode flags\n") + os.Exit(2) + } + *mode = "diff" + } + + // Check mode. + switch *mode { + default: + fmt.Fprintf(os.Stderr, "buildifier: unrecognized mode %s; valid modes are check, diff, fix\n", *mode) + os.Exit(2) + + case "": + *mode = "fix" + + case "check", "diff", "fix", "print_if_changed": + // ok + } + + // If the path flag is set, must only be formatting a single file. + // It doesn't make sense for multiple files to have the same path. 
+ if (*path != "" || *mode == "print_if_changed") && len(args) > 1 { + fmt.Fprintf(os.Stderr, "buildifier: can only format one file when using -path flag or -mode=print_if_changed\n") + os.Exit(2) + } + + if *tablesPath != "" { + if err := tables.ParseAndUpdateJSONDefinitions(*tablesPath, false); err != nil { + fmt.Fprintf(os.Stderr, "buildifier: failed to parse %s for -tables: %s\n", *tablesPath, err) + os.Exit(2) + } + } + + if *addTablesPath != "" { + if err := tables.ParseAndUpdateJSONDefinitions(*addTablesPath, true); err != nil { + fmt.Fprintf(os.Stderr, "buildifier: failed to parse %s for -add_tables: %s\n", *addTablesPath, err) + os.Exit(2) + } + } + + diff = differ.Find() + + if len(args) == 0 || (len(args) == 1 && args[0] == "-") { + // Read from stdin, write to stdout. + data, err := ioutil.ReadAll(os.Stdin) + if err != nil { + fmt.Fprintf(os.Stderr, "buildifier: reading stdin: %v\n", err) + os.Exit(2) + } + if *mode == "fix" { + *mode = "pipe" + } + processFile("stdin", data) + } else { + processFiles(args) + } + + diff.Run() + + for _, file := range toRemove { + os.Remove(file) + } + + os.Exit(exitCode) +} + +func processFiles(files []string) { + // Decide how many file reads to run in parallel. + // At most 100, and at most one per 10 input files. + nworker := 100 + if n := (len(files) + 9) / 10; nworker > n { + nworker = n + } + runtime.GOMAXPROCS(nworker + 1) + + // Start nworker workers reading stripes of the input + // argument list and sending the resulting data on + // separate channels. file[k] is read by worker k%nworker + // and delivered on ch[k%nworker]. + type result struct { + file string + data []byte + err error + } + ch := make([]chan result, nworker) + for i := 0; i < nworker; i++ { + ch[i] = make(chan result, 1) + go func(i int) { + for j := i; j < len(files); j += nworker { + file := files[j] + data, err := ioutil.ReadFile(file) + ch[i] <- result{file, data, err} + } + }(i) + } + + // Process files. 
The processing still runs in a single goroutine + // in sequence. Only the reading of the files has been parallelized. + // The goal is to optimize for runs where most files are already + // formatted correctly, so that reading is the bulk of the I/O. + for i, file := range files { + res := <-ch[i%nworker] + if res.file != file { + fmt.Fprintf(os.Stderr, "buildifier: internal phase error: got %s for %s", res.file, file) + os.Exit(3) + } + if res.err != nil { + fmt.Fprintf(os.Stderr, "buildifier: %v\n", res.err) + exitCode = 3 + continue + } + processFile(file, res.data) + } +} + +// exitCode is the code to use when exiting the program. +// The codes used by buildifier are: +// +// 0: success, everything went well +// 1: syntax errors in input +// 2: usage errors: invoked incorrectly +// 3: unexpected runtime errors: file I/O problems or internal bugs +// 4: check mode failed (reformat is needed) +var exitCode = 0 + +// toRemove is a list of files to remove before exiting. +var toRemove []string + +// diff is the differ to use when *mode == "diff". +var diff *differ.Differ + +// processFile processes a single file containing data. +// It has been read from filename and should be written back if fixing. +func processFile(filename string, data []byte) { + defer func() { + if err := recover(); err != nil { + fmt.Fprintf(os.Stderr, "buildifier: %s: internal error: %v\n", filename, err) + exitCode = 3 + } + }() + + f, err := build.Parse(filename, data) + if err != nil { + // Do not use buildifier: prefix on this error. + // Since it is a parse error, it begins with file:line: + // and we want that to be the first thing in the error. + fmt.Fprintf(os.Stderr, "%v\n", err) + if exitCode < 1 { + exitCode = 1 + } + return + } + + if *path != "" { + f.Path = *path + } + beforeRewrite := build.Format(f) + var info build.RewriteInfo + build.Rewrite(f, &info) + ndata := build.Format(f) + + switch *mode { + case "check": + // check mode: print names of files that need formatting. 
+ if !bytes.Equal(data, ndata) { + // Print: + // name # list of what changed + reformat := "" + if !bytes.Equal(data, beforeRewrite) { + reformat = " reformat" + } + log := "" + if len(info.Log) > 0 && *showlog { + sort.Strings(info.Log) + var uniq []string + last := "" + for _, s := range info.Log { + if s != last { + last = s + uniq = append(uniq, s) + } + } + log = " " + strings.Join(uniq, " ") + } + fmt.Printf("%s #%s %s%s\n", filename, reformat, &info, log) + exitCode = 4 + } + return + + case "diff": + // diff mode: run diff on old and new. + if bytes.Equal(data, ndata) { + return + } + outfile, err := writeTemp(ndata) + if err != nil { + fmt.Fprintf(os.Stderr, "buildifier: %v\n", err) + exitCode = 3 + return + } + infile := filename + if filename == "" { + // data was read from standard filename. + // Write it to a temporary file so diff can read it. + infile, err = writeTemp(data) + if err != nil { + fmt.Fprintf(os.Stderr, "buildifier: %v\n", err) + exitCode = 3 + return + } + } + diff.Show(infile, outfile) + + case "pipe": + // pipe mode - reading from stdin, writing to stdout. + // ("pipe" is not from the command line; it is set above in main.) + os.Stdout.Write(ndata) + return + + case "fix": + // fix mode: update files in place as needed. + if bytes.Equal(data, ndata) { + return + } + + err := ioutil.WriteFile(filename, ndata, 0666) + if err != nil { + fmt.Fprintf(os.Stderr, "buildifier: %s\n", err) + exitCode = 3 + return + } + + if *vflag { + fmt.Fprintf(os.Stderr, "fixed %s\n", filename) + } + case "print_if_changed": + if bytes.Equal(data, ndata) { + return + } + + if _, err := os.Stdout.Write(ndata); err != nil { + fmt.Fprintf(os.Stderr, "buildifier: error writing output: %v\n", err) + exitCode = 3 + return + } + } +} + +// writeTemp writes data to a temporary file and returns the name of the file. 
+func writeTemp(data []byte) (file string, err error) { + f, err := ioutil.TempFile("", "buildifier-tmp-") + if err != nil { + return "", fmt.Errorf("creating temporary file: %v", err) + } + name := f.Name() + toRemove = append(toRemove, name) + defer f.Close() + _, err = f.Write(data) + if err != nil { + return "", fmt.Errorf("writing temporary file: %v", err) + } + return name, nil +} diff --git a/vendor/github.com/bazelbuild/buildtools/buildozer/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/buildozer/BUILD.bazel new file mode 100644 index 00000000000..5a359ea3847 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/buildozer/BUILD.bazel @@ -0,0 +1,17 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = ["main.go"], + visibility = ["//visibility:private"], + deps = [ + "//edit:go_default_library", + "//tables:go_default_library", + ], +) + +go_binary( + name = "buildozer", + library = ":go_default_library", + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/buildozer/README.md b/vendor/github.com/bazelbuild/buildtools/buildozer/README.md new file mode 100644 index 00000000000..47095f7b870 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/buildozer/README.md @@ -0,0 +1,267 @@ +# Buildozer + +Buildozer is a command line tool to rewrite multiple +[Bazel](https://github.com/bazelbuild/bazel) BUILD files using +standard commands. + +## Dependencies + +1. Protobuf go runtime: to download +`go get -u github.com/golang/protobuf/{proto,protoc-gen-go}` + + +## Installation + +1. Change directory to the buildifier/buildozer + +```bash +gopath=$(go env GOPATH) +cd $gopath/src/github.com/bazelbuild/buildtools/buildozer +``` + +2. 
Install + +```bash +go install +``` + +## Usage + +```shell +buildozer [OPTIONS] ['command args' | -f FILE ] label-list +``` + +Here, `label-list` is a comma-separated list of Bazel labels, for example +`//path/to/pkg1:rule1, //path/to/pkg2:rule2`. Buildozer reads commands from +`FILE` (`-` for stdin (format: `|`-separated command line arguments to buildozer, +excluding flags)) + +You should specify at least one command and one target. Buildozer will execute +all commands on all targets. Commands are executed in order, files are processed +in parallel. + +### Targets + +Targets look like Bazel labels, but there can be some differences in presence of +macros. + + * Use the label notation to refer to a rule: `//buildtools/buildozer:edit` + * Use the `__pkg__` suffix to refer to the package declaration: + `//buildtools/buildozer:__pkg__` + * Use an asterisk to refer to all rules in a file: `//pkg:*` + * Use `...` to refer to all descendant BUILD files in a directory: `//pkg/...:*` + * Use percent to refer to all rules of a certain kind: `//pkg:%java_library` + * Use percent-and-number to refer to a rule that begins at a certain line: + `//pkg:%123`. + * Use `-` for the package name if you want to process standard input stream + instead of a file: `-:all_tests`. + +### Options + +OPTIONS include the following options: + + * `-stdout` : write changed BUILD file to stdout + * `-buildifier` : format output using a specific buildifier binary. If empty, use built-in formatter. + * `-k` : apply all commands, even if there are failures + * `-quiet` : suppress informational messages + * `-shorten_labels` : convert added labels to short form, e.g. //foo:bar => :bar + * `-types`: Filter the targets, keeping only those of the given types, e.g. + `buildozer -types go_library,go_binary 'print rule' '//buildtools/buildozer:*'` + * `-eol-comments=false`: When adding new comments, put them on a separate line. + +See `buildozer -help` for the full list. 
+ +### Edit commands + +Buildozer supports the following commands (`'command args'`): + + * `add <attr> <value(s)>`: Adds value(s) to a list attribute of a rule. If a + value is already present in the list, it is not added. + * `new_load <path> <symbol(s)>`: Add a load statement for the given path, + importing the symbols. Before using this, make sure to run + `buildozer 'fix movePackageToTop'`. Afterwards, consider running + `buildozer 'fix unusedLoads'`. + * `comment <attr>? <value>? <comment>`: Add a comment to a rule, an attribute, + or a specific value in a list. Spaces in the comment should be escaped with + backslashes. + * `print_comment <attr>? <value>?` + * `delete`: Delete a rule. + * `fix <fix(es)>?`: Apply a fix. + * `move <old_attr> <new_attr> <value(s)>`: Moves `value(s)` from the list `old_attr` + to the list `new_attr`. The wildcard `*` matches all values. + * `new <rule_kind> <rule_name> [(before|after) <relative_rule_name>]`: Add a + new rule at the end of the BUILD file (before/after `<relative_rule_name>`). + * `print <attr(s)>` + * `remove <attr>`: Removes attribute `attr`. + * `remove <attr> <value(s)>`: Removes `value(s)` from the list `attr`. The + wildcard `*` matches all attributes. Lists containing none of the `value(s)` are + not modified. + * `rename <old_attr> <new_attr>`: Rename the `old_attr` to `new_attr` which must + not yet exist. + * `replace <attr> <old_value> <new_value>`: Replaces `old_value` with `new_value` + in the list `attr`. Wildcard `*` matches all attributes. Lists not containing + `old_value` are not modified. + * `set <attr> <value(s)>`: Sets the value of an attribute. If the attribute + was already present, its old value is replaced. + * `set_if_absent <attr> <value(s)>`: Sets the value of an attribute. If the + attribute was already present, no action is taken. + * `set kind <value>`: Set the target type to value. + * `copy <attr> <from_rule>`: Copies the value of `attr` between rules. If it + exists in the `to_rule`, it will be overwritten. + * `copy_no_overwrite <attr> <from_rule>`: Copies the value of `attr` between + rules. If it exists in the `to_rule`, no action is taken. + +Here, `<attr>` represents an attribute (being `add`ed/`rename`d/`delete`d etc.), +e.g.: `srcs`, `<value(s)>` represents values of the attribute and so on. +A '?' 
indicates that the preceding argument is optional. + +The fix command without a fix specified applies all eligible fixes. +Use `//path/to/pkg:__pkg__` as label for file level changes like `new_load` and +`new_rule`. +A transformation can be applied to all rules of a particular kind by using +`%rule_kind` at the end of the label (see examples below). + +#### Examples + +```bash +# Edit //pkg:rule and //pkg:rule2, and add a dependency on //base +buildozer 'add deps //base' //pkg:rule //pkg:rule2 + +# A load for a skylark file in //pkg +buildozer 'new_load /tools/build_rules/build_test build_test' //pkg:__pkg__ + +# Change the default_visibility to public for the package //pkg +buildozer 'set default_visibility //visibility:public' //pkg:__pkg__ + +# Change all gwt_module targets to java_library in the package //pkg +buildozer 'set kind java_library' //pkg:%gwt_module + +# Replace the dependency on pkg_v1 with a dependency on pkg_v2 +buildozer 'replace deps //pkg_v1 //pkg_v2' //pkg:rule + +# Delete the dependency on foo in every cc_library in the package +buildozer 'remove deps foo' //pkg:%cc_library + +# Delete the testonly attribute in every rule in the package +buildozer 'remove testonly' '//pkg:*' + +# Add a comment to the timeout attribute of //pkg:rule_test +buildozer 'comment timeout Delete\ this\ after\ 2015-12-31.' //pkg:rule_test + +# Add a new rule at the end of the file +buildozer 'new java_library foo' //pkg:__pkg__ + +# Add a cc_binary rule named new_bin before the rule named tests +buildozer 'new cc_binary new_bin before tests' //:__pkg__ + +# Copy an attribute from `protolib` to `py_protolib`. 
+buildozer 'copy testonly protolib' //pkg:py_protolib + +# Set two attributes in the same rule +buildozer 'set compile 1' 'set srcmap 1' //pkg:rule + +# Make a default explicit in all soy_js rules in a package +buildozer 'set_if_absent allowv1syntax 1' //pkg:%soy_js + +# Add an attribute new_attr with value "def_val" to all cc_binary rules +# Note that special characters will automatically be escaped in the string +buildozer 'add new_attr def_val' //:%cc_binary +``` + +### Print commands + +They work just like the edit commands. Expect a return code of 3 as they are not +modifying any file. + + * `print `: For each target, prints the value of the attributes + (see below). + * `print_comment ? ?`: Prints a comment associated with a rule, + an attribute or a specific value in a list. + +The print command prints the value of the attributes. If a target doesn't have +the attribute, a warning is printed on stderr. + +There are some special attributes in the `print` command: + + * `kind`: displays the name of the function + * `label`: the fully qualified label + * `rule`: the entire rule definition + * `startline`: the line number on which the rule begins in the BUILD file + * `endline`: the line number on which the rule ends in the BUILD file + +#### Examples + +```shell +# Print the kind of a target +buildozer 'print kind' base # output: cc_library + +# Print the name of all cc_library in //base +buildozer 'print name' base:%cc_library + +# Get the default visibility of the //base package +buildozer 'print default_visibility' base:%package + +# Print labels of cc_library targets in //base that have a deps attribute +buildozer 'print label deps' base:%cc_library 2>/dev/null | cut -d' ' -f1 + +# Print the list of labels in //base that explicitly set the testonly attribute: +buildozer 'print label testonly' 'base:*' 2>/dev/null + +# Print the entire definition (including comments) of the //base:heapcheck rule: +buildozer 'print rule' //base:heapcheck +``` + +## Converting 
labels + +Buildozer works at the syntax-level. It doesn't evaluate the BUILD files. If you +need to query the information Bazel has, please use `bazel query`. If you have a +list of Bazel labels, chances are that some of them are generated by BUILD +extensions. Labels in Buildozer are slightly different from labels in Bazel. +Bazel cares about the generated code, while Buildozer looks at the BUILD file +before macro expansion. + +To see the expanded BUILD files, try: + +```shell +bazel query --output=build //path/to/BUILD +``` + +## Do multiple changes at once + +Use `buildozer -f ` to load a list of commands from a file. The usage is +just like arguments on the command-line, except that arguments are separated by +`|`. + +```shell +$ cat /tmp/cmds +new cc_library foo|//buildtools/buildozer/BUILD +add deps //base //strings|add srcs foo.cc|//buildtools/buildozer:foo +add deps :foo|//buildtools/buildozer + +$ buildozer -f /tmp/cmds +fixed //buildtools/buildozer/BUILD +``` + +The list of commands will typically be generated and can be large. This is +efficient: Commands are grouped so that each file is modified once. Files are +processed in parallel. + +## Error code + +The return code is: + + * `0` on success, if changes were made + * `1` when there is a usage error + * `2` when at least one command has failed + * `3` on success, when no changes were made + +## Source Structure + + * `buildozer/main.go` : Entry point for the buildozer binary + * `edit/buildozer.go` : Implementation of functions for the buildozer commands + * `edit/edit.go`: Library functions to perform various operations on ASTs. These + * functions are called by the impl functions in buildozer.go + * `edit/fix.go`: Functions for various fixes for the `buildozer 'fix '` + command, like cleaning unused loads, changing labels to canonical notation, etc. 
+ * `edit/types.go`: Type information for attributes + diff --git a/vendor/github.com/bazelbuild/buildtools/buildozer/main.go b/vendor/github.com/bazelbuild/buildtools/buildozer/main.go new file mode 100644 index 00000000000..58aa5efed89 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/buildozer/main.go @@ -0,0 +1,96 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Entry-point for Buildozer binary. + +package main + +import ( + "flag" + "fmt" + "os" + "strings" + + "github.com/bazelbuild/buildtools/edit" + "github.com/bazelbuild/buildtools/tables" +) + +var ( + stdout = flag.Bool("stdout", false, "write changed BUILD file to stdout") + buildifier = flag.String("buildifier", "", "format output using a specific buildifier binary. 
If empty, use built-in formatter") + parallelism = flag.Int("P", 0, "number of cores to use for concurrent actions") + numio = flag.Int("numio", 200, "number of concurrent actions") + commandsFile = flag.String("f", "", "file name to read commands from, use '-' for stdin (format:|-separated command line arguments to buildozer, excluding flags)") + keepGoing = flag.Bool("k", false, "apply all commands, even if there are failures") + filterRuleTypes = stringList("types", "comma-separated list of rule types to change, the default empty list means all rules") + preferEOLComments = flag.Bool("eol-comments", true, "when adding a new comment, put it on the same line if possible") + rootDir = flag.String("root_dir", "", "If present, use this folder rather than $PWD to find the root directory.") + quiet = flag.Bool("quiet", false, "suppress informational messages") + editVariables = flag.Bool("edit-variables", false, "For attributes that simply assign a variable (e.g. hdrs = LIB_HDRS), edit the build variable instead of appending to the attribute.") + isPrintingProto = flag.Bool("output_proto", false, "output serialized devtools.buildozer.Output protos instead of human-readable strings.") + tablesPath = flag.String("tables", "", "path to JSON file with custom table definitions which will replace the built-in tables") + addTablesPath = flag.String("add_tables", "", "path to JSON file with custom table definitions which will be merged with the built-in tables") + + shortenLabelsFlag = flag.Bool("shorten_labels", true, "convert added labels to short form, e.g. 
//foo:bar => :bar") + deleteWithComments = flag.Bool("delete_with_comments", true, "If a list attribute should be deleted even if there is a comment attached to it") +) + +func stringList(name, help string) func() []string { + f := flag.String(name, "", help) + return func() []string { + if *f == "" { + return nil + } + res := strings.Split(*f, ",") + for i := range res { + res[i] = strings.TrimSpace(res[i]) + } + return res + } +} + +func main() { + flag.Parse() + + if *tablesPath != "" { + if err := tables.ParseAndUpdateJSONDefinitions(*tablesPath, false); err != nil { + fmt.Fprintf(os.Stderr, "buildifier: failed to parse %s for -tables: %s\n", *tablesPath, err) + os.Exit(2) + } + } + + + if *addTablesPath != "" { + if err := tables.ParseAndUpdateJSONDefinitions(*addTablesPath, true); err != nil { + fmt.Fprintf(os.Stderr, "buildifier: failed to parse %s for -add_tables: %s\n", *addTablesPath, err) + os.Exit(2) + } + } + + edit.ShortenLabelsFlag = *shortenLabelsFlag + edit.DeleteWithComments = *deleteWithComments + edit.Opts = edit.Options{ + Stdout: *stdout, + Buildifier: *buildifier, + Parallelism: *parallelism, + NumIO: *numio, + CommandsFile: *commandsFile, + KeepGoing: *keepGoing, + FilterRuleTypes: filterRuleTypes(), + PreferEOLComments: *preferEOLComments, + RootDir: *rootDir, + Quiet: *quiet, + EditVariables: *editVariables, + IsPrintingProto: *isPrintingProto, + } + os.Exit(edit.Buildozer(flag.Args())) +} diff --git a/vendor/github.com/bazelbuild/buildtools/config/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/config/BUILD.bazel new file mode 100644 index 00000000000..15e0ca82c39 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/config/BUILD.bazel @@ -0,0 +1,7 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["config.go"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/config/config.go 
b/vendor/github.com/bazelbuild/buildtools/config/config.go new file mode 100644 index 00000000000..afa976580bf --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/config/config.go @@ -0,0 +1,30 @@ +/* +Copyright 2017 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Package config provides environment specific configuration elements to unused_deps +package config + +var ( + // DefaultBuildTool should be used to build and extract deps + DefaultBuildTool = "bazel" + // BuildToolHelp message for above + BuildToolHelp = "the build executable (like bazel)" + // ExtraActionFileNameHelp help for extra actions + ExtraActionFileNameHelp = "When specified, just prints suspected unused deps." 
+ // DefaultBinDir to look for outputs + DefaultBinDir = "bazel-bin" + // DefaultExtraBuildFlags is internal-only + DefaultExtraBuildFlags = []string{} +) diff --git a/vendor/github.com/bazelbuild/buildtools/deps_proto/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/deps_proto/BUILD.bazel new file mode 100644 index 00000000000..7441ae88965 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/deps_proto/BUILD.bazel @@ -0,0 +1,21 @@ +load("@io_bazel_rules_go//proto:go_proto_library.bzl", "go_proto_library") +load("//build:build_defs.bzl", "genfile_check_test") + +genfile_check_test( + src = "deps.gen.pb.go", + gen = "deps.pb.go", +) + +genrule( + name = "copy_and_fix", + srcs = ["@io_bazel//src/main/protobuf:srcs"], + outs = ["deps.proto"], + cmd = ("SRCS=($(locations @io_bazel//src/main/protobuf:srcs));" + + "cp $$(dirname $$SRCS)/deps.proto $@"), +) + +go_proto_library( + name = "go_default_library", + srcs = ["deps.proto"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/deps_proto/deps.gen.pb.go b/vendor/github.com/bazelbuild/buildtools/deps_proto/deps.gen.pb.go new file mode 100755 index 00000000000..88608d9273f --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/deps_proto/deps.gen.pb.go @@ -0,0 +1,227 @@ +// Code generated by protoc-gen-go. +// source: deps_proto/deps.proto +// DO NOT EDIT! + +/* +Package blaze_deps is a generated protocol buffer package. + +It is generated from these files: + deps_proto/deps.proto + +It has these top-level messages: + SourceLocation + Dependency + Dependencies +*/ +package blaze_deps + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Dependency_Kind int32 + +const ( + // Dependency used explicitly in the source. + Dependency_EXPLICIT Dependency_Kind = 0 + // Dependency that is implicitly loaded and used by the compiler. + Dependency_IMPLICIT Dependency_Kind = 1 + // Unused dependency. + Dependency_UNUSED Dependency_Kind = 2 + // Implicit dependency considered by the compiler but not completed. + Dependency_INCOMPLETE Dependency_Kind = 3 +) + +var Dependency_Kind_name = map[int32]string{ + 0: "EXPLICIT", + 1: "IMPLICIT", + 2: "UNUSED", + 3: "INCOMPLETE", +} +var Dependency_Kind_value = map[string]int32{ + "EXPLICIT": 0, + "IMPLICIT": 1, + "UNUSED": 2, + "INCOMPLETE": 3, +} + +func (x Dependency_Kind) Enum() *Dependency_Kind { + p := new(Dependency_Kind) + *p = x + return p +} +func (x Dependency_Kind) String() string { + return proto.EnumName(Dependency_Kind_name, int32(x)) +} +func (x *Dependency_Kind) UnmarshalJSON(data []byte) error { + value, err := proto.UnmarshalJSONEnum(Dependency_Kind_value, data, "Dependency_Kind") + if err != nil { + return err + } + *x = Dependency_Kind(value) + return nil +} +func (Dependency_Kind) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{1, 0} } + +// A specific location within a source file. 
+type SourceLocation struct { + Path *string `protobuf:"bytes,1,req,name=path" json:"path,omitempty"` + Line *int32 `protobuf:"varint,2,opt,name=line" json:"line,omitempty"` + Column *int32 `protobuf:"varint,3,opt,name=column" json:"column,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SourceLocation) Reset() { *m = SourceLocation{} } +func (m *SourceLocation) String() string { return proto.CompactTextString(m) } +func (*SourceLocation) ProtoMessage() {} +func (*SourceLocation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *SourceLocation) GetPath() string { + if m != nil && m.Path != nil { + return *m.Path + } + return "" +} + +func (m *SourceLocation) GetLine() int32 { + if m != nil && m.Line != nil { + return *m.Line + } + return 0 +} + +func (m *SourceLocation) GetColumn() int32 { + if m != nil && m.Column != nil { + return *m.Column + } + return 0 +} + +type Dependency struct { + // Path to the artifact representing this dependency. + Path *string `protobuf:"bytes,1,req,name=path" json:"path,omitempty"` + // Dependency kind + Kind *Dependency_Kind `protobuf:"varint,2,req,name=kind,enum=blaze_deps.Dependency_Kind" json:"kind,omitempty"` + // Source file locations: compilers can pinpoint the uses of a dependency. 
+ Location []*SourceLocation `protobuf:"bytes,3,rep,name=location" json:"location,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Dependency) Reset() { *m = Dependency{} } +func (m *Dependency) String() string { return proto.CompactTextString(m) } +func (*Dependency) ProtoMessage() {} +func (*Dependency) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *Dependency) GetPath() string { + if m != nil && m.Path != nil { + return *m.Path + } + return "" +} + +func (m *Dependency) GetKind() Dependency_Kind { + if m != nil && m.Kind != nil { + return *m.Kind + } + return Dependency_EXPLICIT +} + +func (m *Dependency) GetLocation() []*SourceLocation { + if m != nil { + return m.Location + } + return nil +} + +// Top-level message found in .deps artifacts +type Dependencies struct { + Dependency []*Dependency `protobuf:"bytes,1,rep,name=dependency" json:"dependency,omitempty"` + // Name of the rule being analyzed. + RuleLabel *string `protobuf:"bytes,2,opt,name=rule_label,json=ruleLabel" json:"rule_label,omitempty"` + // Whether the action was successful; even when compilation fails, partial + // dependency information can be useful. + Success *bool `protobuf:"varint,3,opt,name=success" json:"success,omitempty"` + // Packages contained in the output jar, sorted alphabetically. 
+ ContainedPackage []string `protobuf:"bytes,4,rep,name=contained_package,json=containedPackage" json:"contained_package,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *Dependencies) Reset() { *m = Dependencies{} } +func (m *Dependencies) String() string { return proto.CompactTextString(m) } +func (*Dependencies) ProtoMessage() {} +func (*Dependencies) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *Dependencies) GetDependency() []*Dependency { + if m != nil { + return m.Dependency + } + return nil +} + +func (m *Dependencies) GetRuleLabel() string { + if m != nil && m.RuleLabel != nil { + return *m.RuleLabel + } + return "" +} + +func (m *Dependencies) GetSuccess() bool { + if m != nil && m.Success != nil { + return *m.Success + } + return false +} + +func (m *Dependencies) GetContainedPackage() []string { + if m != nil { + return m.ContainedPackage + } + return nil +} + +func init() { + proto.RegisterType((*SourceLocation)(nil), "blaze_deps.SourceLocation") + proto.RegisterType((*Dependency)(nil), "blaze_deps.Dependency") + proto.RegisterType((*Dependencies)(nil), "blaze_deps.Dependencies") + proto.RegisterEnum("blaze_deps.Dependency_Kind", Dependency_Kind_name, Dependency_Kind_value) +} + +func init() { proto.RegisterFile("deps_proto/deps.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x92, 0x5f, 0x6b, 0xd4, 0x40, + 0x14, 0xc5, 0xcd, 0x1f, 0xeb, 0xe6, 0x5a, 0x96, 0x38, 0x60, 0x19, 0x14, 0x21, 0xe4, 0x29, 0x28, + 0x4c, 0xa1, 0x0f, 0x7d, 0xf4, 0xc1, 0x36, 0x0f, 0xc1, 0xb4, 0x86, 0x69, 0x0b, 0xbe, 0x85, 0x64, + 0xe6, 0xb2, 0x0e, 0x9d, 0xce, 0x84, 0x4d, 0xb2, 0xa2, 0x9f, 0xc9, 0x2f, 0xe2, 0xb7, 0x92, 0xcc, + 0xae, 0xd9, 0x2d, 0xec, 0xdb, 0x39, 0x27, 0xe7, 0x5e, 0x7e, 0xb9, 0x0c, 0xbc, 0x95, 0xd8, 0xf5, + 0x75, 0xb7, 0xb6, 0x83, 0x3d, 0x9f, 0x24, 0x73, 0x92, 0x40, 0xab, 0x9b, 0xdf, 0x58, 0x4f, 
0x49, + 0x5a, 0xc1, 0xf2, 0xce, 0x8e, 0x6b, 0x81, 0xa5, 0x15, 0xcd, 0xa0, 0xac, 0x21, 0x04, 0xc2, 0xae, + 0x19, 0x7e, 0x50, 0x2f, 0xf1, 0xb3, 0x88, 0x3b, 0x3d, 0x65, 0x5a, 0x19, 0xa4, 0x7e, 0xe2, 0x65, + 0x2f, 0xb9, 0xd3, 0xe4, 0x0c, 0x4e, 0x84, 0xd5, 0xe3, 0x93, 0xa1, 0x81, 0x4b, 0x77, 0x2e, 0xfd, + 0xeb, 0x01, 0x5c, 0x63, 0x87, 0x46, 0xa2, 0x11, 0xbf, 0x8e, 0xae, 0x3b, 0x87, 0xf0, 0x51, 0x19, + 0x49, 0xfd, 0xc4, 0xcf, 0x96, 0x17, 0xef, 0xd9, 0x9e, 0x87, 0xed, 0x27, 0xd9, 0x57, 0x65, 0x24, + 0x77, 0x45, 0x72, 0x09, 0x0b, 0xbd, 0xe3, 0xa3, 0x41, 0x12, 0x64, 0xaf, 0x2f, 0xde, 0x1d, 0x0e, + 0x3d, 0xff, 0x03, 0x3e, 0x77, 0xd3, 0xcf, 0x10, 0x4e, 0x5b, 0xc8, 0x29, 0x2c, 0xf2, 0xef, 0x55, + 0x59, 0x5c, 0x15, 0xf7, 0xf1, 0x8b, 0xc9, 0x15, 0x37, 0x3b, 0xe7, 0x11, 0x80, 0x93, 0x87, 0xdb, + 0x87, 0xbb, 0xfc, 0x3a, 0xf6, 0xc9, 0x12, 0xa0, 0xb8, 0xbd, 0xfa, 0x76, 0x53, 0x95, 0xf9, 0x7d, + 0x1e, 0x07, 0xe9, 0x1f, 0x0f, 0x4e, 0x67, 0x22, 0x85, 0x3d, 0xb9, 0x04, 0x90, 0x33, 0x21, 0xf5, + 0x1c, 0xca, 0xd9, 0x71, 0x7e, 0x7e, 0xd0, 0x24, 0x1f, 0x00, 0xd6, 0xa3, 0xc6, 0x5a, 0x37, 0x2d, + 0x6a, 0x77, 0xc6, 0x88, 0x47, 0x53, 0x52, 0x4e, 0x01, 0xa1, 0xf0, 0xaa, 0x1f, 0x85, 0xc0, 0xbe, + 0x77, 0xc7, 0x5c, 0xf0, 0xff, 0x96, 0x7c, 0x82, 0x37, 0xc2, 0x9a, 0xa1, 0x51, 0x06, 0x65, 0xdd, + 0x35, 0xe2, 0xb1, 0x59, 0x21, 0x0d, 0x93, 0x20, 0x8b, 0x78, 0x3c, 0x7f, 0xa8, 0xb6, 0xf9, 0x97, + 0x8f, 0x90, 0x09, 0xfb, 0xc4, 0x56, 0xd6, 0xae, 0x34, 0x32, 0x89, 0x9b, 0xc1, 0x5a, 0xdd, 0xb3, + 0x76, 0x54, 0x5a, 0x32, 0xad, 0x5a, 0xb6, 0x51, 0xf8, 0x73, 0xfb, 0x08, 0xfe, 0x05, 0x00, 0x00, + 0xff, 0xff, 0xca, 0x96, 0x89, 0xb3, 0x1c, 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/bazelbuild/buildtools/differ/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/differ/BUILD.bazel new file mode 100644 index 00000000000..e50b5677af5 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/differ/BUILD.bazel @@ -0,0 +1,12 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + 
+go_library( + name = "go_default_library", + srcs = [ + "diff.go", + ] + select({ + "//:windows": ["isatty_windows.go"], + "//conditions:default": ["isatty_other.go"], + }), + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/differ/diff.go b/vendor/github.com/bazelbuild/buildtools/differ/diff.go new file mode 100644 index 00000000000..a91bea016c2 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/differ/diff.go @@ -0,0 +1,128 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Package differ determines how to invoke diff in the given environment. +package differ + +import ( + "fmt" + "os" + "os/exec" + "runtime" + "strings" +) + +// Invocation of different diff commands, according to environment variables. + +// A Differ describes how to invoke diff. +type Differ struct { + Cmd string // command + MultiDiff bool // diff accepts list of multiple pairs + Args []string // accumulated arguments +} + +// run runs the given command with args. +func (d *Differ) run(command string, args ...string) { + // The special diff command ":" means don't run anything. + if d.Cmd == ":" { + return + } + + // Pass args to bash and reference with $@ to avoid shell injection in args. 
+ var cmd *exec.Cmd + if command == "FC" { + cmd = exec.Command(command, "/T") + } else { + cmd = exec.Command("/bin/bash", "-c", command+` "$@"`, "--") + } + cmd.Args = append(cmd.Args, args...) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + if err := cmd.Start(); err != nil { + // Couldn't even start bash. Worth reporting. + fmt.Fprintf(os.Stderr, "buildifier: %s: %v\n", command, err) + return + } + + // Assume bash reported anything else worth reporting. + // As long as the program started (above), we don't care about the + // exact exit status. In the most common case, the diff command + // will exit 1, because there are diffs, causing bash to exit 1. + cmd.Wait() +} + +// Show diffs old and new. +// For a single-pair diff program, Show runs the diff program before returning. +// For a multi-pair diff program, Show records the pair for later use by Run. +func (d *Differ) Show(old, new string) { + if !d.MultiDiff { + d.run(d.Cmd, old, new) + return + } + + d.Args = append(d.Args, ":", old, new) +} + +// Run runs any pending diffs. +// For a single-pair diff program, Show already ran diff; Run is a no-op. +// For a multi-pair diff program, Run displays the diffs queued by Show. +func (d *Differ) Run() { + if !d.MultiDiff { + return + } + + if len(d.Args) == 0 { + return + } + d.run(d.Cmd, d.Args...) +} + +// Find returns the differ to use, using various environment variables. +func Find() *Differ { + d := &Differ{} + if cmd := os.Getenv("BUILDIFIER_DIFF"); cmd != "" { + d.Cmd = cmd + } + + // Load MultiDiff setting from environment. 
+ knowMultiDiff := false + if md := os.Getenv("BUILDIFIER_MULTIDIFF"); md == "0" || md == "1" { + d.MultiDiff = md == "1" + knowMultiDiff = true + } + + if d.Cmd != "" { + if !knowMultiDiff { + lower := strings.ToLower(d.Cmd) + d.MultiDiff = strings.Contains(lower, "tkdiff") && + isatty(1) && os.Getenv("DISPLAY") != "" + } + } else { + if !knowMultiDiff { + d.MultiDiff = isatty(1) && os.Getenv("DISPLAY") != "" + } + if d.MultiDiff { + d.Cmd = "tkdiff" + } else { + if runtime.GOOS == "windows" { + d.Cmd = "FC" + } else { + d.Cmd = "diff" + } + } + } + return d +} diff --git a/vendor/github.com/bazelbuild/buildtools/differ/isatty_other.go b/vendor/github.com/bazelbuild/buildtools/differ/isatty_other.go new file mode 100644 index 00000000000..07ec49b1757 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/differ/isatty_other.go @@ -0,0 +1,29 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build !windows + +package differ + +import "syscall" + +// isatty reports whether fd is a tty. +// Actually it reports whether fd is a character device, which is close enough. 
+func isatty(fd int) bool { + var st syscall.Stat_t + if err := syscall.Fstat(fd, &st); err != nil { + return false + } + return st.Mode&syscall.S_IFMT == syscall.S_IFCHR +} diff --git a/vendor/github.com/bazelbuild/buildtools/differ/isatty_windows.go b/vendor/github.com/bazelbuild/buildtools/differ/isatty_windows.go new file mode 100644 index 00000000000..d308081e852 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/differ/isatty_windows.go @@ -0,0 +1,23 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build windows + +package differ + +// isatty reports whether fd is a tty. +// On Windows we just say no. 
+func isatty(fd int) bool { + return false +} diff --git a/vendor/github.com/bazelbuild/buildtools/edit/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/edit/BUILD.bazel new file mode 100644 index 00000000000..a6b0063a49d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/edit/BUILD.bazel @@ -0,0 +1,29 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = [ + "buildozer.go", + "edit.go", + "fix.go", + "types.go", + ], + visibility = ["//visibility:public"], + deps = [ + "//api_proto:go_default_library", + "//build:go_default_library", + "//build_proto:go_default_library", + "//file:go_default_library", + "//lang:go_default_library", + "//tables:go_default_library", + "//wspace:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = ["edit_test.go"], + library = ":go_default_library", + deps = ["//build:go_default_library"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/edit/buildozer.go b/vendor/github.com/bazelbuild/buildtools/edit/buildozer.go new file mode 100644 index 00000000000..5bd618ebbbc --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/edit/buildozer.go @@ -0,0 +1,976 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Buildozer is a tool for programatically editing BUILD files. 
+ +package edit + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "log" + "os" + "os/exec" + "path" + "path/filepath" + "regexp" + "runtime" + "strconv" + "strings" + + apipb "github.com/bazelbuild/buildtools/api_proto" + "github.com/bazelbuild/buildtools/build" + "github.com/bazelbuild/buildtools/file" + "github.com/golang/protobuf/proto" +) + +// Options represents choices about how buildozer should behave. +type Options struct { + Stdout bool // write changed BUILD file to stdout + Buildifier string // path to buildifier binary + Parallelism int // number of cores to use for concurrent actions + NumIO int // number of concurrent actions + CommandsFile string // file name to read commands from, use '-' for stdin (format:|-separated command line arguments to buildozer, excluding flags + KeepGoing bool // apply all commands, even if there are failures + FilterRuleTypes []string // list of rule types to change, empty means all + PreferEOLComments bool // when adding a new comment, put it on the same line if possible + RootDir string // If present, use this folder rather than $PWD to find the root dir + Quiet bool // suppress informational messages. + EditVariables bool // for attributes that simply assign a variable (e.g. hdrs = LIB_HDRS), edit the build variable instead of appending to the attribute. + IsPrintingProto bool // output serialized devtools.buildozer.Output protos instead of human-readable strings +} + +// Opts represents the options to be used by buildozer, and can be overriden before calling Buildozer. +var Opts = Options{NumIO: 200, PreferEOLComments: true} + +// Usage is a user-overriden func to print the program usage. +var Usage = func() {} + +var fileModified = false // set to true when a file has been fixed + +const stdinPackageName = "-" // the special package name to represent stdin + +// CmdEnvironment stores the information the commands below have access to. 
+type CmdEnvironment struct { + File *build.File // the AST + Rule *build.Rule // the rule to modify + Vars map[string]*build.BinaryExpr // global variables set in the build file + Pkg string // the full package name + Args []string // the command-line arguments + output *apipb.Output_Record // output proto, stores whatever a command wants to print +} + +// The cmdXXX functions implement the various commands. + +func cmdAdd(env CmdEnvironment) (*build.File, error) { + attr := env.Args[0] + for _, val := range env.Args[1:] { + if IsIntList(attr) { + AddValueToListAttribute(env.Rule, attr, env.Pkg, &build.LiteralExpr{Token: val}, &env.Vars) + continue + } + strVal := &build.StringExpr{Value: ShortenLabel(val, env.Pkg)} + AddValueToListAttribute(env.Rule, attr, env.Pkg, strVal, &env.Vars) + } + return env.File, nil +} + +func cmdComment(env CmdEnvironment) (*build.File, error) { + // The comment string is always the last argument in the list. + str := env.Args[len(env.Args)-1] + str = strings.Replace(str, "\\n", "\n", -1) + // Multiline comments should go on a separate line. + fullLine := !Opts.PreferEOLComments || strings.Contains(str, "\n") + str = strings.Replace("# "+str, "\n", "\n# ", -1) + comment := []build.Comment{build.Comment{Token: str}} + + // The comment might be attached to a rule, an attribute, or a value in a list, + // depending on how many arguments are passed. 
+ switch len(env.Args) { + case 1: // Attach to a rule + env.Rule.Call.Comments.Before = comment + case 2: // Attach to an attribute + if attr := env.Rule.AttrDefn(env.Args[0]); attr != nil { + if fullLine { + attr.X.Comment().Before = comment + } else { + attr.Y.Comment().Suffix = comment + } + } + case 3: // Attach to a specific value in a list + if attr := env.Rule.Attr(env.Args[0]); attr != nil { + if expr := ListFind(attr, env.Args[1], env.Pkg); expr != nil { + if fullLine { + expr.Comments.Before = comment + } else { + expr.Comments.Suffix = comment + } + } + } + default: + panic("cmdComment") + } + return env.File, nil +} + +// commentsText concatenates comments into a single line. +func commentsText(comments []build.Comment) string { + var segments []string + for _, comment := range comments { + token := comment.Token + if strings.HasPrefix(token, "#") { + token = token[1:] + } + segments = append(segments, strings.TrimSpace(token)) + } + return strings.Replace(strings.Join(segments, " "), "\n", " ", -1) +} + +func cmdPrintComment(env CmdEnvironment) (*build.File, error) { + attrError := func() error { + return fmt.Errorf("rule \"//%s:%s\" has no attribute \"%s\"", env.Pkg, env.Rule.Name(), env.Args[0]) + } + + switch len(env.Args) { + case 0: // Print rule comment. + env.output.Fields = []*apipb.Output_Record_Field{ + &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{commentsText(env.Rule.Call.Comments.Before)}}, + } + case 1: // Print attribute comment. + attr := env.Rule.AttrDefn(env.Args[0]) + if attr == nil { + return nil, attrError() + } + comments := append(attr.Before, attr.Suffix...) + env.output.Fields = []*apipb.Output_Record_Field{ + &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{commentsText(comments)}}, + } + case 2: // Print comment of a specific value in a list. 
+ attr := env.Rule.Attr(env.Args[0]) + if attr == nil { + return nil, attrError() + } + value := env.Args[1] + expr := ListFind(attr, value, env.Pkg) + if expr == nil { + return nil, fmt.Errorf("attribute \"%s\" has no value \"%s\"", env.Args[0], value) + } + comments := append(expr.Comments.Before, expr.Comments.Suffix...) + env.output.Fields = []*apipb.Output_Record_Field{ + &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{commentsText(comments)}}, + } + default: + panic("cmdPrintComment") + } + return nil, nil +} + +func cmdDelete(env CmdEnvironment) (*build.File, error) { + return DeleteRule(env.File, env.Rule), nil +} + +func cmdMove(env CmdEnvironment) (*build.File, error) { + oldAttr := env.Args[0] + newAttr := env.Args[1] + if len(env.Args) == 3 && env.Args[2] == "*" { + if err := MoveAllListAttributeValues(env.Rule, oldAttr, newAttr, env.Pkg, &env.Vars); err != nil { + return nil, err + } + return env.File, nil + } + fixed := false + for _, val := range env.Args[2:] { + if deleted := ListAttributeDelete(env.Rule, oldAttr, val, env.Pkg); deleted != nil { + AddValueToListAttribute(env.Rule, newAttr, env.Pkg, deleted, &env.Vars) + fixed = true + } + } + if fixed { + return env.File, nil + } + return nil, nil +} + +func cmdNew(env CmdEnvironment) (*build.File, error) { + kind := env.Args[0] + name := env.Args[1] + addAtEOF, insertionIndex, err := findInsertionIndex(env) + if err != nil { + return nil, err + } + + if FindRuleByName(env.File, name) != nil { + return nil, fmt.Errorf("rule '%s' already exists", name) + } + + call := &build.CallExpr{X: &build.LiteralExpr{Token: kind}} + rule := &build.Rule{Call: call} + rule.SetAttr("name", &build.StringExpr{Value: name}) + + if addAtEOF { + env.File.Stmt = InsertAfterLastOfSameKind(env.File.Stmt, rule.Call) + } else { + env.File.Stmt = InsertAfter(insertionIndex, env.File.Stmt, call) + } + return env.File, nil +} + +// findInsertionIndex is used by cmdNew to find the place at which to insert the 
new rule. +func findInsertionIndex(env CmdEnvironment) (bool, int, error) { + if len(env.Args) < 4 { + return true, 0, nil + } + + relativeToRuleName := env.Args[3] + ruleIdx := IndexOfRuleByName(env.File, relativeToRuleName) + if ruleIdx == -1 { + return true, 0, nil + } + + switch env.Args[2] { + case "before": + return false, ruleIdx - 1, nil + case "after": + return false, ruleIdx, nil + default: + return true, 0, fmt.Errorf("Unknown relative operator '%s'; allowed: 'before', 'after'", env.Args[1]) + } +} + +func cmdNewLoad(env CmdEnvironment) (*build.File, error) { + env.File.Stmt = InsertLoad(env.File.Stmt, env.Args) + return env.File, nil +} + +func cmdPrint(env CmdEnvironment) (*build.File, error) { + format := env.Args + if len(format) == 0 { + format = []string{"name", "kind"} + } + fields := make([]*apipb.Output_Record_Field, len(format)) + + for i, str := range format { + value := env.Rule.Attr(str) + if str == "kind" { + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{env.Rule.Kind()}} + } else if str == "label" { + if env.Rule.Attr("name") != nil { + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{fmt.Sprintf("//%s:%s", env.Pkg, env.Rule.Name())}} + } else { + return nil, nil + } + } else if str == "rule" { + fields[i] = &apipb.Output_Record_Field{ + Value: &apipb.Output_Record_Field_Text{build.FormatString(env.Rule.Call)}, + } + } else if str == "startline" { + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Number{int32(env.Rule.Call.ListStart.Line)}} + } else if str == "endline" { + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Number{int32(env.Rule.Call.End.Pos.Line)}} + } else if value == nil { + fmt.Fprintf(os.Stderr, "rule \"//%s:%s\" has no attribute \"%s\"\n", + env.Pkg, env.Rule.Name(), str) + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Error{Error: apipb.Output_Record_Field_MISSING}} + } else if lit, 
ok := value.(*build.LiteralExpr); ok { + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{lit.Token}} + } else if string, ok := value.(*build.StringExpr); ok { + fields[i] = &apipb.Output_Record_Field{ + Value: &apipb.Output_Record_Field_Text{string.Value}, + QuoteWhenPrinting: true, + } + } else if strList := env.Rule.AttrStrings(str); strList != nil { + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_List{List: &apipb.RepeatedString{Strings: strList}}} + } else { + // Some other Expr we haven't listed above. Just print it. + fields[i] = &apipb.Output_Record_Field{Value: &apipb.Output_Record_Field_Text{build.FormatString(value)}} + } + } + + env.output.Fields = fields + return nil, nil +} + +func attrKeysForPattern(rule *build.Rule, pattern string) []string { + if pattern == "*" { + return rule.AttrKeys() + } + return []string{pattern} +} + +func cmdRemove(env CmdEnvironment) (*build.File, error) { + if len(env.Args) == 1 { // Remove the attribute + if env.Rule.DelAttr(env.Args[0]) != nil { + return env.File, nil + } + } else { // Remove values in the attribute. 
+ fixed := false + for _, key := range attrKeysForPattern(env.Rule, env.Args[0]) { + for _, val := range env.Args[1:] { + ListAttributeDelete(env.Rule, key, val, env.Pkg) + fixed = true + } + } + if fixed { + return env.File, nil + } + } + return nil, nil +} + +func cmdRename(env CmdEnvironment) (*build.File, error) { + oldAttr := env.Args[0] + newAttr := env.Args[1] + if err := RenameAttribute(env.Rule, oldAttr, newAttr); err != nil { + return nil, err + } + return env.File, nil +} + +func cmdReplace(env CmdEnvironment) (*build.File, error) { + oldV := env.Args[1] + newV := env.Args[2] + for _, key := range attrKeysForPattern(env.Rule, env.Args[0]) { + attr := env.Rule.Attr(key) + if e, ok := attr.(*build.StringExpr); ok { + if LabelsEqual(e.Value, oldV, env.Pkg) { + env.Rule.SetAttr(key, getAttrValueExpr(key, []string{newV})) + } + } else { + ListReplace(attr, oldV, newV, env.Pkg) + } + } + return env.File, nil +} + +func cmdSet(env CmdEnvironment) (*build.File, error) { + attr := env.Args[0] + args := env.Args[1:] + if attr == "kind" { + env.Rule.SetKind(args[0]) + } else { + env.Rule.SetAttr(attr, getAttrValueExpr(attr, args)) + } + return env.File, nil +} + +func cmdSetIfAbsent(env CmdEnvironment) (*build.File, error) { + attr := env.Args[0] + args := env.Args[1:] + if attr == "kind" { + return nil, fmt.Errorf("setting 'kind' is not allowed for set_if_absent. 
Got %s", env.Args) + } + if env.Rule.Attr(attr) == nil { + env.Rule.SetAttr(attr, getAttrValueExpr(attr, args)) + } + return env.File, nil +} + +func getAttrValueExpr(attr string, args []string) build.Expr { + switch { + case attr == "kind": + return nil + case IsIntList(attr): + var list []build.Expr + for _, i := range args { + list = append(list, &build.LiteralExpr{Token: i}) + } + return &build.ListExpr{List: list} + case IsList(attr) && !(len(args) == 1 && strings.HasPrefix(args[0], "glob(")): + var list []build.Expr + for _, i := range args { + list = append(list, &build.StringExpr{Value: i}) + } + return &build.ListExpr{List: list} + case IsString(attr): + return &build.StringExpr{Value: args[0]} + default: + return &build.LiteralExpr{Token: args[0]} + } +} + +func cmdCopy(env CmdEnvironment) (*build.File, error) { + attrName := env.Args[0] + from := env.Args[1] + + return copyAttributeBetweenRules(env, attrName, from) +} + +func cmdCopyNoOverwrite(env CmdEnvironment) (*build.File, error) { + attrName := env.Args[0] + from := env.Args[1] + + if env.Rule.Attr(attrName) != nil { + return env.File, nil + } + + return copyAttributeBetweenRules(env, attrName, from) +} + +func copyAttributeBetweenRules(env CmdEnvironment, attrName string, from string) (*build.File, error) { + fromRule := FindRuleByName(env.File, from) + if fromRule == nil { + return nil, fmt.Errorf("could not find rule '%s'", from) + } + attr := fromRule.Attr(attrName) + if attr == nil { + return nil, fmt.Errorf("rule '%s' does not have attribute '%s'", from, attrName) + } + + ast, err := build.Parse("" /* filename */, []byte(build.FormatString(attr))) + if err != nil { + return nil, fmt.Errorf("could not parse attribute value %v", build.FormatString(attr)) + } + + env.Rule.SetAttr(attrName, ast.Stmt[0]) + return env.File, nil +} + +func cmdFix(env CmdEnvironment) (*build.File, error) { + // Fix the whole file + if env.Rule.Kind() == "package" { + return FixFile(env.File, env.Pkg, env.Args), nil + 
} + // Fix a specific rule + return FixRule(env.File, env.Pkg, env.Rule, env.Args), nil +} + +// CommandInfo provides a command function and info on incoming arguments. +type CommandInfo struct { + Fn func(CmdEnvironment) (*build.File, error) + MinArg int + MaxArg int + Template string +} + +// AllCommands associates the command names with their function and number +// of arguments. +var AllCommands = map[string]CommandInfo{ + "add": {cmdAdd, 2, -1, " "}, + "new_load": {cmdNewLoad, 1, -1, " "}, + "comment": {cmdComment, 1, 3, "? ? "}, + "print_comment": {cmdPrintComment, 0, 2, "? ?"}, + "delete": {cmdDelete, 0, 0, ""}, + "fix": {cmdFix, 0, -1, "?"}, + "move": {cmdMove, 3, -1, " "}, + "new": {cmdNew, 2, 4, " [(before|after) ]"}, + "print": {cmdPrint, 0, -1, ""}, + "remove": {cmdRemove, 1, -1, " "}, + "rename": {cmdRename, 2, 2, " "}, + "replace": {cmdReplace, 3, 3, " "}, + "set": {cmdSet, 2, -1, " "}, + "set_if_absent": {cmdSetIfAbsent, 2, -1, " "}, + "copy": {cmdCopy, 2, 2, " "}, + "copy_no_overwrite": {cmdCopyNoOverwrite, 2, 2, " "}, +} + +func expandTargets(f *build.File, rule string) ([]*build.Rule, error) { + if r := FindRuleByName(f, rule); r != nil { + return []*build.Rule{r}, nil + } else if r := FindExportedFile(f, rule); r != nil { + return []*build.Rule{r}, nil + } else if rule == "all" || rule == "*" { + // "all" is a valid name, it is a wildcard only if no such rule is found. + return f.Rules(""), nil + } else if strings.HasPrefix(rule, "%") { + // "%java_library" will match all java_library functions in the package + // "%" will match the rule which begins at LINENUM. + // This is for convenience, "%" is not a valid character in bazel targets. 
+ kind := rule[1:] + if linenum, err := strconv.Atoi(kind); err == nil { + if r := f.RuleAt(linenum); r != nil { + return []*build.Rule{r}, nil + } + } else { + return f.Rules(kind), nil + } + } + return nil, fmt.Errorf("rule '%s' not found", rule) +} + +func filterRules(rules []*build.Rule) (result []*build.Rule) { + if len(Opts.FilterRuleTypes) == 0 { + return rules + } + for _, rule := range rules { + acceptableType := false + for _, filterType := range Opts.FilterRuleTypes { + if rule.Kind() == filterType { + acceptableType = true + break + } + } + if acceptableType || rule.Kind() == "package" { + result = append(result, rule) + } + } + return +} + +// command contains a list of tokens that describe a buildozer command. +type command struct { + tokens []string +} + +// checkCommandUsage checks the number of argument of a command. +// It prints an error and usage when it is not valid. +func checkCommandUsage(name string, cmd CommandInfo, count int) { + if count >= cmd.MinArg && (cmd.MaxArg == -1 || count <= cmd.MaxArg) { + return + } + + if count < cmd.MinArg { + fmt.Fprintf(os.Stderr, "Too few arguments for command '%s', expected at least %d.\n", + name, cmd.MinArg) + } else { + fmt.Fprintf(os.Stderr, "Too many arguments for command '%s', expected at most %d.\n", + name, cmd.MaxArg) + } + Usage() +} + +// Match text that only contains spaces if they're escaped with '\'. +var spaceRegex = regexp.MustCompile(`(\\ |[^ ])+`) + +// SplitOnSpaces behaves like strings.Fields, except that spaces can be escaped. +// " some dummy\\ string" -> ["some", "dummy string"] +func SplitOnSpaces(input string) []string { + result := spaceRegex.FindAllString(input, -1) + for i, s := range result { + result[i] = strings.Replace(s, `\ `, " ", -1) + } + return result +} + +// parseCommands parses commands and targets they should be applied on from +// a list of arguments. 
+// Each argument can be either: +// - a command (as defined by AllCommands) and its parameters, separated by +// whitespace +// - a target all commands that are parsed during one call to parseCommands +// should be applied on +func parseCommands(args []string) (commands []command, targets []string) { + for _, arg := range args { + commandTokens := SplitOnSpaces(arg) + cmd, found := AllCommands[commandTokens[0]] + if found { + checkCommandUsage(commandTokens[0], cmd, len(commandTokens)-1) + commands = append(commands, command{commandTokens}) + } else { + targets = append(targets, arg) + } + } + return +} + +// commandsForTarget contains commands to be executed on the given target. +type commandsForTarget struct { + target string + commands []command +} + +// commandsForFile contains the file name and all commands that should be +// applied on that file, indexed by their target. +type commandsForFile struct { + file string + commands []commandsForTarget +} + +// commandError returns an error that formats 'err' in the context of the +// commands to be executed on the given target. +func commandError(commands []command, target string, err error) error { + return fmt.Errorf("error while executing commands %s on target %s: %s", commands, target, err) +} + +// rewriteResult contains the outcome of applying fixes to a single file. +type rewriteResult struct { + file string + errs []error + modified bool + records []*apipb.Output_Record +} + +// getGlobalVariables returns the global variable assignments in the provided list of expressions. +// That is, for each variable assignment of the form +// a = v +// vars["a"] will contain the BinaryExpr whose Y value is the assignment "a = v". 
+func getGlobalVariables(exprs []build.Expr) (vars map[string]*build.BinaryExpr) { + vars = make(map[string]*build.BinaryExpr) + for _, expr := range exprs { + if binExpr, ok := expr.(*build.BinaryExpr); ok { + if binExpr.Op != "=" { + continue + } + if lhs, ok := binExpr.X.(*build.LiteralExpr); ok { + vars[lhs.Token] = binExpr + } + } + } + return vars +} + +// When checking the filesystem, we need to look for any of the +// possible buildFileNames. For historical reasons, the +// parts of the tool that generate paths that we may want to examine +// continue to assume that build files are all named "BUILD". +var buildFileNames = [...]string{"BUILD.bazel", "BUILD", "BUCK"} +var buildFileNamesSet = map[string]bool{ + "BUILD.bazel": true, + "BUILD": true, + "BUCK": true, +} + +// rewrite parses the BUILD file for the given file, transforms the AST, +// and write the changes back in the file (or on stdout). +func rewrite(commandsForFile commandsForFile) *rewriteResult { + name := commandsForFile.file + var data []byte + var err error + var fi os.FileInfo + records := []*apipb.Output_Record{} + if name == stdinPackageName { // read on stdin + data, err = ioutil.ReadAll(os.Stdin) + if err != nil { + return &rewriteResult{file: name, errs: []error{err}} + } + } else { + origName := name + for _, suffix := range buildFileNames { + if strings.HasSuffix(name, "/"+suffix) { + name = strings.TrimSuffix(name, suffix) + break + } + } + for _, suffix := range buildFileNames { + name = name + suffix + data, fi, err = file.ReadFile(name) + if err == nil { + break + } + name = strings.TrimSuffix(name, suffix) + } + if err != nil { + data, fi, err = file.ReadFile(name) + } + if err != nil { + err = errors.New("file not found or not readable") + return &rewriteResult{file: origName, errs: []error{err}} + } + } + + f, err := build.Parse(name, data) + if err != nil { + return &rewriteResult{file: name, errs: []error{err}} + } + + vars := map[string]*build.BinaryExpr{} + if 
Opts.EditVariables { + vars = getGlobalVariables(f.Stmt) + } + var errs []error + changed := false + for _, commands := range commandsForFile.commands { + target := commands.target + commands := commands.commands + _, absPkg, rule := InterpretLabelForWorkspaceLocation(Opts.RootDir, target) + _, pkg, _ := ParseLabel(target) + if pkg == stdinPackageName { // Special-case: This is already absolute + absPkg = stdinPackageName + } + + targets, err := expandTargets(f, rule) + if err != nil { + cerr := commandError(commands, target, err) + errs = append(errs, cerr) + if !Opts.KeepGoing { + return &rewriteResult{file: name, errs: errs, records: records} + + } + } + targets = filterRules(targets) + for _, cmd := range commands { + for _, r := range targets { + cmdInfo := AllCommands[cmd.tokens[0]] + record := &apipb.Output_Record{} + newf, err := cmdInfo.Fn(CmdEnvironment{f, r, vars, absPkg, cmd.tokens[1:], record}) + if len(record.Fields) != 0 { + records = append(records, record) + } + if err != nil { + cerr := commandError([]command{cmd}, target, err) + if Opts.KeepGoing { + errs = append(errs, cerr) + } else { + return &rewriteResult{file: name, errs: []error{cerr}, records: records} + } + } + if newf != nil { + changed = true + f = newf + } + } + } + } + if !changed { + return &rewriteResult{file: name, errs: errs, records: records} + } + f = RemoveEmptyPackage(f) + ndata, err := runBuildifier(f) + if err != nil { + return &rewriteResult{file: name, errs: []error{fmt.Errorf("running buildifier: %v", err)}, records: records} + } + + if Opts.Stdout || name == stdinPackageName { + os.Stdout.Write(ndata) + return &rewriteResult{file: name, errs: errs, records: records} + } + + if bytes.Equal(data, ndata) { + return &rewriteResult{file: name, errs: errs, records: records} + } + + if err := EditFile(fi, name); err != nil { + return &rewriteResult{file: name, errs: []error{err}, records: records} + } + + if err := file.WriteFile(name, ndata); err != nil { + return 
&rewriteResult{file: name, errs: []error{err}, records: records} + } + + fileModified = true + return &rewriteResult{file: name, errs: errs, modified: true, records: records} +} + +// EditFile is a function that does any prework needed before editing a file. +// e.g. "checking out for write" from a locking source control repo. +var EditFile = func(fi os.FileInfo, name string) error { + return nil +} + +// runBuildifier formats the build file f. +// Runs Opts.Buildifier if it's non-empty, otherwise uses built-in formatter. +// Opts.Buildifier is useful to force consistency with other tools that call Buildifier. +func runBuildifier(f *build.File) ([]byte, error) { + if Opts.Buildifier == "" { + build.Rewrite(f, nil) + return build.Format(f), nil + } + + cmd := exec.Command(Opts.Buildifier) + data := build.Format(f) + cmd.Stdin = bytes.NewBuffer(data) + stdout := bytes.NewBuffer(nil) + stderr := bytes.NewBuffer(nil) + cmd.Stdout = stdout + cmd.Stderr = stderr + err := cmd.Run() + if stderr.Len() > 0 { + return nil, fmt.Errorf("%s", stderr.Bytes()) + } + if err != nil { + return nil, err + } + return stdout.Bytes(), nil +} + +// Given a target, whose package may contain a trailing "/...", returns all +// extisting BUILD file paths which match the package. 
+func targetExpressionToBuildFiles(target string) []string { + file, _, _ := InterpretLabelForWorkspaceLocation(Opts.RootDir, target) + if Opts.RootDir == "" { + var err error + if file, err = filepath.Abs(file); err != nil { + fmt.Printf("Cannot make path absolute: %s\n", err.Error()) + os.Exit(1) + } + } + + if !strings.HasSuffix(file, "/.../BUILD") { + return []string{file} + } + + var buildFiles []string + searchDirs := []string{strings.TrimSuffix(file, "/.../BUILD")} + for len(searchDirs) != 0 { + lastIndex := len(searchDirs) - 1 + dir := searchDirs[lastIndex] + searchDirs = searchDirs[:lastIndex] + + dirFiles, err := ioutil.ReadDir(dir) + if err != nil { + continue + } + + for _, dirFile := range dirFiles { + if dirFile.IsDir() { + searchDirs = append(searchDirs, path.Join(dir, dirFile.Name())) + } else if _, ok := buildFileNamesSet[dirFile.Name()]; ok { + buildFiles = append(buildFiles, path.Join(dir, dirFile.Name())) + } + } + } + + return buildFiles +} + +// appendCommands adds the given commands to be applied to each of the given targets +// via the commandMap. 
+func appendCommands(commandMap map[string][]commandsForTarget, args []string) { + commands, targets := parseCommands(args) + for _, target := range targets { + if strings.HasSuffix(target, "/BUILD") { + target = strings.TrimSuffix(target, "/BUILD") + ":__pkg__" + } + var buildFiles []string + _, pkg, _ := ParseLabel(target) + if pkg == stdinPackageName { + buildFiles = []string{stdinPackageName} + } else { + buildFiles = targetExpressionToBuildFiles(target) + } + + for _, file := range buildFiles { + commandMap[file] = append(commandMap[file], commandsForTarget{target, commands}) + } + } +} + +func appendCommandsFromFile(commandsByFile map[string][]commandsForTarget, fileName string) { + var reader io.Reader + if Opts.CommandsFile == stdinPackageName { + reader = os.Stdin + } else { + rc := file.OpenReadFile(Opts.CommandsFile) + reader = rc + defer rc.Close() + } + scanner := bufio.NewScanner(reader) + for scanner.Scan() { + line := scanner.Text() + if line == "" { + continue + } + args := strings.Split(line, "|") + appendCommands(commandsByFile, args) + } + if err := scanner.Err(); err != nil { + fmt.Fprintf(os.Stderr, "Error while reading commands file: %v", scanner.Err()) + } +} + +func printRecord(writer io.Writer, record *apipb.Output_Record) { + fields := record.Fields + line := make([]string, len(fields)) + for i, field := range fields { + switch value := field.Value.(type) { + case *apipb.Output_Record_Field_Text: + if field.QuoteWhenPrinting && strings.ContainsRune(value.Text, ' ') { + line[i] = fmt.Sprintf("%q", value.Text) + } else { + line[i] = value.Text + } + break + case *apipb.Output_Record_Field_Number: + line[i] = strconv.Itoa(int(value.Number)) + break + case *apipb.Output_Record_Field_Error: + switch value.Error { + case apipb.Output_Record_Field_UNKNOWN: + line[i] = "(unknown)" + break + case apipb.Output_Record_Field_MISSING: + line[i] = "(missing)" + break + } + break + case *apipb.Output_Record_Field_List: + line[i] = fmt.Sprintf("[%s]", 
strings.Join(value.List.Strings, " ")) + break + } + } + + fmt.Fprint(writer, strings.Join(line, " ")+"\n") +} + +// Buildozer loops over all arguments on the command line fixing BUILD files. +func Buildozer(args []string) int { + commandsByFile := make(map[string][]commandsForTarget) + if Opts.CommandsFile != "" { + appendCommandsFromFile(commandsByFile, Opts.CommandsFile) + } else { + if len(args) == 0 { + Usage() + } + appendCommands(commandsByFile, args) + } + + numFiles := len(commandsByFile) + if Opts.Parallelism > 0 { + runtime.GOMAXPROCS(Opts.Parallelism) + } + results := make(chan *rewriteResult, numFiles) + data := make(chan commandsForFile) + + for i := 0; i < Opts.NumIO; i++ { + go func(results chan *rewriteResult, data chan commandsForFile) { + for commandsForFile := range data { + results <- rewrite(commandsForFile) + } + }(results, data) + } + + for file, commands := range commandsByFile { + data <- commandsForFile{file, commands} + } + close(data) + records := []*apipb.Output_Record{} + hasErrors := false + for i := 0; i < numFiles; i++ { + fileResults := <-results + if fileResults == nil { + continue + } + hasErrors = hasErrors || len(fileResults.errs) > 0 + for _, err := range fileResults.errs { + fmt.Fprintf(os.Stderr, "%s: %s\n", fileResults.file, err) + } + if fileResults.modified && !Opts.Quiet { + fmt.Fprintf(os.Stderr, "fixed %s\n", fileResults.file) + } + if fileResults.records != nil { + records = append(records, fileResults.records...) 
+ } + } + + if Opts.IsPrintingProto { + data, err := proto.Marshal(&apipb.Output{Records: records}) + if err != nil { + log.Fatal("marshaling error: ", err) + } + fmt.Fprintf(os.Stdout, "%s", data) + } else { + for _, record := range records { + printRecord(os.Stdout, record) + } + } + + if hasErrors { + return 2 + } + if !fileModified && !Opts.Stdout { + return 3 + } + return 0 +} diff --git a/vendor/github.com/bazelbuild/buildtools/edit/edit.go b/vendor/github.com/bazelbuild/buildtools/edit/edit.go new file mode 100644 index 00000000000..c9b541fb595 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/edit/edit.go @@ -0,0 +1,823 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Package edit provides high-level auxiliary functions for AST manipulation +// on BUILD files. +package edit + +import ( + "fmt" + "os" + "path" + "path/filepath" + "sort" + "strconv" + "strings" + + "github.com/bazelbuild/buildtools/build" + "github.com/bazelbuild/buildtools/wspace" +) + +var ( + // ShortenLabelsFlag if true converts added labels to short form , e.g. //foo:bar => :bar + ShortenLabelsFlag = true + // DeleteWithComments if true a list attribute will be be deleted in ListDelete, even if there is a comment attached to it + DeleteWithComments = true +) + +// ParseLabel parses a Blaze label (eg. 
//devtools/buildozer:rule), and returns +// the repo name ("" for the main repo), package (with leading slashes trimmed) +// and rule name (e.g. ["", "devtools/buildozer", "rule"]). +func ParseLabel(target string) (string, string, string) { + repo := "" + if strings.HasPrefix(target, "@") { + target = strings.TrimLeft(target, "@") + parts := strings.SplitN(target, "/", 2) + if len(parts) == 1 { + // "@foo" -> "foo", "", "foo" (ie @foo//:foo) + return target, "", target + } + repo = parts[0] + target = "/" + parts[1] + } + // TODO(bazel-team): check if the next line can now be deleted + target = strings.TrimRight(target, ":") // labels can end with ':' + parts := strings.SplitN(target, ":", 2) + parts[0] = strings.TrimPrefix(parts[0], "//") + if len(parts) == 1 { + if strings.HasPrefix(target, "//") { + // "//absolute/pkg" -> "absolute/pkg", "pkg" + return repo, parts[0], path.Base(parts[0]) + } + // "relative/label" -> "", "relative/label" + return repo, "", parts[0] + } + return repo, parts[0], parts[1] +} + +// ShortenLabel rewrites labels to use the canonical form (the form +// recommended by build-style). This behavior can be disabled using the +// --noshorten_labels flag for projects that consistently use long-form labels. +// "//foo/bar:bar" => "//foo/bar", or ":bar" when possible. +func ShortenLabel(label string, pkg string) string { + if !ShortenLabelsFlag { + return label + } + if !strings.HasPrefix(label, "//") { + // It doesn't look like a long label, so we preserve it. + return label + } + repo, labelPkg, rule := ParseLabel(label) + if repo == "" && labelPkg == pkg { // local label + return ":" + rule + } + slash := strings.LastIndex(labelPkg, "/") + if (slash >= 0 && labelPkg[slash+1:] == rule) || labelPkg == rule { + return "//" + labelPkg + } + return label +} + +// LabelsEqual returns true if label1 and label2 are equal. The function +// takes care of the optional ":" prefix and differences between long-form +// labels and local labels. 
+func LabelsEqual(label1, label2, pkg string) bool { + str1 := strings.TrimPrefix(ShortenLabel(label1, pkg), ":") + str2 := strings.TrimPrefix(ShortenLabel(label2, pkg), ":") + return str1 == str2 +} + +// isFile returns true if the path refers to a regular file after following +// symlinks. +func isFile(path string) bool { + path, err := filepath.EvalSymlinks(path) + if err != nil { + return false + } + info, err := os.Stat(path) + if err != nil { + return false + } + return info.Mode().IsRegular() +} + +// InterpretLabelForWorkspaceLocation returns the name of the BUILD file to +// edit, the full package name, and the rule. It takes a workspace-rooted +// directory to use. +func InterpretLabelForWorkspaceLocation(root string, target string) (buildFile string, pkg string, rule string) { + repo, pkg, rule := ParseLabel(target) + rootDir, relativePath := wspace.FindWorkspaceRoot(root) + if repo != "" { + files, err := wspace.FindRepoBuildFiles(rootDir) + if err == nil { + if buildFile, ok := files[repo]; ok { + return buildFile, pkg, rule + } + } + // TODO(rodrigoq): report error for other repos + } + + if strings.HasPrefix(target, "//") { + buildFile = path.Join(rootDir, pkg, "BUILD") + return + } + if isFile(pkg) { + // allow operation on other files like WORKSPACE + buildFile = pkg + pkg = path.Join(relativePath, filepath.Dir(pkg)) + return + } + if pkg != "" { + buildFile = pkg + "/BUILD" + } else { + buildFile = "BUILD" + } + pkg = path.Join(relativePath, pkg) + return +} + +// InterpretLabel returns the name of the BUILD file to edit, the full +// package name, and the rule. It uses the pwd for resolving workspace file paths. +func InterpretLabel(target string) (buildFile string, pkg string, rule string) { + return InterpretLabelForWorkspaceLocation("", target) +} + +// ExprToRule returns a Rule from an Expr. +// The boolean is false iff the Expr is not a function call, or does not have +// the expected kind. 
+func ExprToRule(expr build.Expr, kind string) (*build.Rule, bool) { + call, ok := expr.(*build.CallExpr) + if !ok { + return nil, false + } + k, ok := call.X.(*build.LiteralExpr) + if !ok || k.Token != kind { + return nil, false + } + return &build.Rule{Call: call}, true +} + +// ExistingPackageDeclaration returns the package declaration, or nil if there is none. +func ExistingPackageDeclaration(f *build.File) *build.Rule { + for _, stmt := range f.Stmt { + if rule, ok := ExprToRule(stmt, "package"); ok { + return rule + } + } + return nil +} + +// PackageDeclaration returns the package declaration. If it doesn't +// exist, it is created at the top of the BUILD file, after leading +// comments. +func PackageDeclaration(f *build.File) *build.Rule { + if pkg := ExistingPackageDeclaration(f); pkg != nil { + return pkg + } + all := []build.Expr{} + added := false + call := &build.CallExpr{X: &build.LiteralExpr{Token: "package"}} + // Skip CommentBlocks and find a place to insert the package declaration. + for _, stmt := range f.Stmt { + _, ok := stmt.(*build.CommentBlock) + if !ok && !added { + all = append(all, call) + added = true + } + all = append(all, stmt) + } + if !added { // In case the file is empty. + all = append(all, call) + } + f.Stmt = all + return &build.Rule{Call: call} +} + +// RemoveEmptyPackage removes empty package declarations from the file, i.e.: +// package() +// This might appear because of a buildozer transformation (e.g. when removing a package +// attribute). Removing it is required for the file to be valid. 
+func RemoveEmptyPackage(f *build.File) *build.File { + var all []build.Expr + for _, stmt := range f.Stmt { + if call, ok := stmt.(*build.CallExpr); ok { + functionName, ok := call.X.(*build.LiteralExpr) + if ok && functionName.Token == "package" && len(call.List) == 0 { + continue + } + } + all = append(all, stmt) + } + return &build.File{Path: f.Path, Comments: f.Comments, Stmt: all} +} + +// InsertAfter inserts an expression after index i. +func InsertAfter(i int, stmt []build.Expr, expr build.Expr) []build.Expr { + i = i + 1 // index after the element at i + result := make([]build.Expr, len(stmt)+1) + copy(result[0:i], stmt[0:i]) + result[i] = expr + copy(result[i+1:], stmt[i:]) + return result +} + +// IndexOfLast finds the index of the last expression of a specific kind. +func IndexOfLast(stmt []build.Expr, Kind string) int { + lastIndex := -1 + for i, s := range stmt { + sAsCallExpr, ok := s.(*build.CallExpr) + if !ok { + continue + } + literal, ok := sAsCallExpr.X.(*build.LiteralExpr) + if ok && literal.Token == Kind { + lastIndex = i + } + } + return lastIndex +} + +// InsertAfterLastOfSameKind inserts an expression after the last expression of the same kind. +func InsertAfterLastOfSameKind(stmt []build.Expr, expr *build.CallExpr) []build.Expr { + index := IndexOfLast(stmt, expr.X.(*build.LiteralExpr).Token) + if index == -1 { + return InsertAtEnd(stmt, expr) + } + return InsertAfter(index, stmt, expr) +} + +// InsertAtEnd inserts an expression at the end of a list, before trailing comments. +func InsertAtEnd(stmt []build.Expr, expr build.Expr) []build.Expr { + var i int + for i = len(stmt) - 1; i >= 0; i-- { + _, ok := stmt[i].(*build.CommentBlock) + if !ok { + break + } + } + return InsertAfter(i, stmt, expr) +} + +// FindRuleByName returns the rule in the file that has the given name. +// If the name is "__pkg__", it returns the global package declaration. 
+func FindRuleByName(f *build.File, name string) *build.Rule { + if name == "__pkg__" { + return PackageDeclaration(f) + } + i := IndexOfRuleByName(f, name) + if i != -1 { + return &build.Rule{Call: f.Stmt[i].(*build.CallExpr)} + } + return nil +} + +// UseImplicitName returns the rule in the file if it meets these conditions: +// - It is the only unnamed rule in the file. +// - The file path's ending directory name and the passed rule name match. +// In the Pants Build System, by pantsbuild, the use of an implicit name makes +// creating targets easier. This function implements such names. +func UseImplicitName(f *build.File, rule string) *build.Rule { + // We disallow empty names + if f.Path == "BUILD" { + return nil + } + ruleCount := 0 + var temp, found *build.Rule + pkg := filepath.Base(filepath.Dir(f.Path)) + + for _, stmt := range f.Stmt { + call, ok := stmt.(*build.CallExpr) + if !ok { + continue + } + temp = &build.Rule{Call: call} + if temp.Kind() != "" && temp.Name() == "" { + ruleCount++ + found = temp + } + } + + if ruleCount == 1 { + if rule == pkg { + return found + } + } + return nil +} + +// IndexOfRuleByName returns the index (in f.Stmt) of the CallExpr which defines a rule named `name`, or -1 if it doesn't exist. +func IndexOfRuleByName(f *build.File, name string) int { + linenum := -1 + if strings.HasPrefix(name, "%") { + // "%" will match the rule which begins at LINENUM. + // This is for convenience, "%" is not a valid character in bazel targets. 
+ if result, err := strconv.Atoi(name[1:]); err == nil { + linenum = result + } + } + + for i, stmt := range f.Stmt { + call, ok := stmt.(*build.CallExpr) + if !ok { + continue + } + r := &build.Rule{Call: call} + start, _ := call.X.Span() + if r.Name() == name || start.Line == linenum { + return i + } + } + return -1 +} + +// FindExportedFile returns the first exports_files call which contains the +// file 'name', or nil if not found +func FindExportedFile(f *build.File, name string) *build.Rule { + for _, r := range f.Rules("exports_files") { + if len(r.Call.List) == 0 { + continue + } + pkg := "" // Files are not affected by the package name + if ListFind(r.Call.List[0], name, pkg) != nil { + return r + } + } + return nil +} + +// DeleteRule returns the AST without the specified rule +func DeleteRule(f *build.File, rule *build.Rule) *build.File { + var all []build.Expr + for _, stmt := range f.Stmt { + if stmt == rule.Call { + continue + } + all = append(all, stmt) + } + return &build.File{Path: f.Path, Comments: f.Comments, Stmt: all} +} + +// DeleteRuleByName returns the AST without the rules that have the +// given name. +func DeleteRuleByName(f *build.File, name string) *build.File { + var all []build.Expr + for _, stmt := range f.Stmt { + call, ok := stmt.(*build.CallExpr) + if !ok { + all = append(all, stmt) + continue + } + r := &build.Rule{Call: call} + if r.Name() != name { + all = append(all, stmt) + } + } + return &build.File{Path: f.Path, Comments: f.Comments, Stmt: all} +} + +// DeleteRuleByKind removes the rules of the specified kind from the AST. +// Returns an updated copy of f. 
+func DeleteRuleByKind(f *build.File, kind string) *build.File { + var all []build.Expr + for _, stmt := range f.Stmt { + call, ok := stmt.(*build.CallExpr) + if !ok { + all = append(all, stmt) + continue + } + k, ok := call.X.(*build.LiteralExpr) + if !ok || k.Token != kind { + all = append(all, stmt) + } + } + return &build.File{Path: f.Path, Comments: f.Comments, Stmt: all} +} + +// AllLists returns all the lists concatenated in an expression. +// For example, in: glob(["*.go"]) + [":rule"] +// the function will return [[":rule"]]. +func AllLists(e build.Expr) []*build.ListExpr { + switch e := e.(type) { + case *build.ListExpr: + return []*build.ListExpr{e} + case *build.BinaryExpr: + if e.Op == "+" { + return append(AllLists(e.X), AllLists(e.Y)...) + } + } + return nil +} + +// FirstList works in the same way as AllLists, except that it +// returns only one list, or nil. +func FirstList(e build.Expr) *build.ListExpr { + switch e := e.(type) { + case *build.ListExpr: + return e + case *build.BinaryExpr: + if e.Op == "+" { + li := FirstList(e.X) + if li == nil { + return FirstList(e.Y) + } + return li + } + } + return nil +} + +// AllStrings returns all the string literals concatenated in an expression. +// For example, in: "foo" + x + "bar" +// the function will return ["foo", "bar"]. +func AllStrings(e build.Expr) []*build.StringExpr { + switch e := e.(type) { + case *build.StringExpr: + return []*build.StringExpr{e} + case *build.BinaryExpr: + if e.Op == "+" { + return append(AllStrings(e.X), AllStrings(e.Y)...) + } + } + return nil +} + +// ListFind looks for a string in the list expression (which may be a +// concatenation of lists). It returns the element if it is found. nil +// otherwise. 
+func ListFind(e build.Expr, item string, pkg string) *build.StringExpr { + item = ShortenLabel(item, pkg) + for _, li := range AllLists(e) { + for _, elem := range li.List { + str, ok := elem.(*build.StringExpr) + if ok && LabelsEqual(str.Value, item, pkg) { + return str + } + } + } + return nil +} + +// hasComments returns whether the StringExpr literal has a comment attached to it. +func hasComments(literal *build.StringExpr) bool { + return len(literal.Before) > 0 || len(literal.Suffix) > 0 +} + +// ContainsComments returns whether the expr has a comment that includes str. +func ContainsComments(expr build.Expr, str string) bool { + str = strings.ToLower(str) + com := expr.Comment() + comments := append(com.Before, com.Suffix...) + comments = append(comments, com.After...) + for _, c := range comments { + if strings.Contains(strings.ToLower(c.Token), str) { + return true + } + } + return false +} + +// ListDelete deletes the item from a list expression in e and returns +// the StringExpr deleted, or nil otherwise. +func ListDelete(e build.Expr, item, pkg string) (deleted *build.StringExpr) { + deleted = nil + item = ShortenLabel(item, pkg) + for _, li := range AllLists(e) { + var all []build.Expr + for _, elem := range li.List { + if str, ok := elem.(*build.StringExpr); ok { + if LabelsEqual(str.Value, item, pkg) && (DeleteWithComments || !hasComments(str)) { + deleted = str + continue + } + } + all = append(all, elem) + } + li.List = all + } + return deleted +} + +// ListAttributeDelete deletes string item from list attribute attr, deletes attr if empty, +// and returns the StringExpr deleted, or nil otherwise. 
+func ListAttributeDelete(rule *build.Rule, attr, item, pkg string) *build.StringExpr { + deleted := ListDelete(rule.Attr(attr), item, pkg) + if deleted != nil { + if listExpr, ok := rule.Attr(attr).(*build.ListExpr); ok && len(listExpr.List) == 0 { + rule.DelAttr(attr) + } + } + return deleted +} + +// ListReplace replaces old with value in all lists in e and returns a Boolean +// to indicate whether the replacement was successful. +func ListReplace(e build.Expr, old, value, pkg string) bool { + replaced := false + old = ShortenLabel(old, pkg) + for _, li := range AllLists(e) { + for k, elem := range li.List { + str, ok := elem.(*build.StringExpr) + if !ok || !LabelsEqual(str.Value, old, pkg) { + continue + } + li.List[k] = &build.StringExpr{Value: ShortenLabel(value, pkg), Comments: *elem.Comment()} + replaced = true + } + } + return replaced +} + +// isExprLessThan compares two Expr statements. Currently, only labels are supported. +func isExprLessThan(x1, x2 build.Expr) bool { + str1, ok1 := x1.(*build.StringExpr) + str2, ok2 := x2.(*build.StringExpr) + if ok1 != ok2 { + return ok2 + } + if ok1 && ok2 { + // Labels starting with // are put at the end. + pre1 := strings.HasPrefix(str1.Value, "//") + pre2 := strings.HasPrefix(str2.Value, "//") + if pre1 != pre2 { + return pre2 + } + return str1.Value < str2.Value + } + return false +} + +func sortedInsert(list []build.Expr, item build.Expr) []build.Expr { + i := 0 + for ; i < len(list); i++ { + if isExprLessThan(item, list[i]) { + break + } + } + res := make([]build.Expr, 0, len(list)+1) + res = append(res, list[:i]...) + res = append(res, item) + res = append(res, list[i:]...) + return res +} + +// attributeMustNotBeSorted returns true if the list in the attribute cannot be +// sorted. For some attributes, it makes sense to try to do a sorted insert +// (e.g. deps), even when buildifier will not sort it for conservative reasons. +// For a few attributes, sorting will never make sense. 
+func attributeMustNotBeSorted(rule, attr string) bool { + // TODO(bazel-team): Come up with a more complete list. + return attr == "args" +} + +// getVariable returns the binary expression that assignes a variable to expr, if expr is +// an identifier of a variable that vars contains a mapping for. +func getVariable(expr build.Expr, vars *map[string]*build.BinaryExpr) (varAssignment *build.BinaryExpr) { + if vars == nil { + return nil + } + + if literal, ok := expr.(*build.LiteralExpr); ok { + if varAssignment = (*vars)[literal.Token]; varAssignment != nil { + return varAssignment + } + } + return nil +} + +// AddValueToList adds a value to a list. If the expression is +// not a list, a list with a single element is appended to the original +// expression. +func AddValueToList(oldList build.Expr, pkg string, item build.Expr, sorted bool) build.Expr { + if oldList == nil { + return &build.ListExpr{List: []build.Expr{item}} + } + + str, ok := item.(*build.StringExpr) + if ok && ListFind(oldList, str.Value, pkg) != nil { + // The value is already in the list. + return oldList + } + li := FirstList(oldList) + if li != nil { + if sorted { + li.List = sortedInsert(li.List, item) + } else { + li.List = append(li.List, item) + } + return oldList + } + list := &build.ListExpr{List: []build.Expr{item}} + concat := &build.BinaryExpr{Op: "+", X: oldList, Y: list} + return concat +} + +// AddValueToListAttribute adds the given item to the list attribute identified by name and pkg. 
+func AddValueToListAttribute(r *build.Rule, name string, pkg string, item build.Expr, vars *map[string]*build.BinaryExpr) { + old := r.Attr(name) + sorted := !attributeMustNotBeSorted(r.Kind(), name) + if varAssignment := getVariable(old, vars); varAssignment != nil { + varAssignment.Y = AddValueToList(varAssignment.Y, pkg, item, sorted) + } else { + r.SetAttr(name, AddValueToList(old, pkg, item, sorted)) + } +} + +// MoveAllListAttributeValues moves all values from list attribute oldAttr to newAttr, +// and deletes oldAttr. +func MoveAllListAttributeValues(rule *build.Rule, oldAttr, newAttr, pkg string, vars *map[string]*build.BinaryExpr) error { + if rule.Attr(oldAttr) == nil { + return fmt.Errorf("no attribute %s found in %s", oldAttr, rule.Name()) + } + if rule.Attr(newAttr) == nil { + RenameAttribute(rule, oldAttr, newAttr) + return nil + } + if listExpr, ok := rule.Attr(oldAttr).(*build.ListExpr); ok { + for _, val := range listExpr.List { + AddValueToListAttribute(rule, newAttr, pkg, val, vars) + } + rule.DelAttr(oldAttr) + return nil + } + return fmt.Errorf("%s already exists and %s is not a simple list", newAttr, oldAttr) +} + +// DictionarySet looks for the key in the dictionary expression. If value is not nil, +// it replaces the current value with it. In all cases, it returns the current value. +func DictionarySet(dict *build.DictExpr, key string, value build.Expr) build.Expr { + for _, e := range dict.List { + kv, _ := e.(*build.KeyValueExpr) + if k, ok := kv.Key.(*build.StringExpr); ok && k.Value == key { + if value != nil { + kv.Value = value + } + return kv.Value + } + } + if value != nil { + kv := &build.KeyValueExpr{Key: &build.StringExpr{Value: key}, Value: value} + dict.List = append(dict.List, kv) + } + return nil +} + +// RenameAttribute renames an attribute in a rule. 
+func RenameAttribute(r *build.Rule, oldName, newName string) error { + if r.Attr(newName) != nil { + return fmt.Errorf("attribute %s already exists in rule %s", newName, r.Name()) + } + for _, kv := range r.Call.List { + as, ok := kv.(*build.BinaryExpr) + if !ok || as.Op != "=" { + continue + } + k, ok := as.X.(*build.LiteralExpr) + if !ok || k.Token != oldName { + continue + } + k.Token = newName + return nil + } + return fmt.Errorf("no attribute %s found in rule %s", oldName, r.Name()) +} + +// EditFunction is a wrapper around build.Edit. The callback is called only on +// functions 'name'. +func EditFunction(v build.Expr, name string, f func(x *build.CallExpr, stk []build.Expr) build.Expr) build.Expr { + return build.Edit(v, func(expr build.Expr, stk []build.Expr) build.Expr { + call, ok := expr.(*build.CallExpr) + if !ok { + return nil + } + fct, ok := call.X.(*build.LiteralExpr) + if !ok || fct.Token != name { + return nil + } + return f(call, stk) + }) +} + +// UsedSymbols returns the set of symbols used in the BUILD file (variables, function names). +func UsedSymbols(f *build.File) map[string]bool { + symbols := make(map[string]bool) + build.Walk(f, func(expr build.Expr, stack []build.Expr) { + literal, ok := expr.(*build.LiteralExpr) + if !ok { + return + } + // Check if we are on the left-side of an assignment + for _, e := range stack { + if as, ok := e.(*build.BinaryExpr); ok { + if as.Op == "=" && as.X == expr { + return + } + } + } + symbols[literal.Token] = true + }) + return symbols +} + +func newLoad(args []string) *build.CallExpr { + load := &build.CallExpr{ + X: &build.LiteralExpr{ + Token: "load", + }, + List: []build.Expr{}, + ForceCompact: true, + } + for _, a := range args { + load.List = append(load.List, &build.StringExpr{Value: a}) + } + return load +} + +// appendLoad tries to find an existing load location and append symbols to it. 
+func appendLoad(stmts []build.Expr, args []string) bool { + if len(args) == 0 { + return false + } + location := args[0] + symbolsToLoad := make(map[string]bool) + for _, s := range args[1:] { + symbolsToLoad[s] = true + } + var lastLoad *build.CallExpr + for _, s := range stmts { + call, ok := s.(*build.CallExpr) + if !ok { + continue + } + if l, ok := call.X.(*build.LiteralExpr); !ok || l.Token != "load" { + continue + } + if len(call.List) < 2 { + continue + } + if s, ok := call.List[0].(*build.StringExpr); !ok || s.Value != location { + continue // Loads a different file. + } + for _, arg := range call.List[1:] { + if s, ok := arg.(*build.StringExpr); ok { + delete(symbolsToLoad, s.Value) // Already loaded. + } + } + // Remember the last insert location, but potentially remove more symbols + // that are already loaded in other subsequent calls. + lastLoad = call + } + + if lastLoad == nil { + return false + } + + // Append the remaining loads to the last load location. + sortedSymbols := []string{} + for s := range symbolsToLoad { + sortedSymbols = append(sortedSymbols, s) + } + sort.Strings(sortedSymbols) + for _, s := range sortedSymbols { + lastLoad.List = append(lastLoad.List, &build.StringExpr{Value: s}) + } + return true +} + +// InsertLoad inserts a load statement at the top of the list of statements. +// The load statement is constructed using args. Symbols that are already loaded +// from the given filepath are ignored. If stmts already contains a load for the +// location in arguments, appends the symbols to load to it. 
+func InsertLoad(stmts []build.Expr, args []string) []build.Expr { + if appendLoad(stmts, args) { + return stmts + } + + load := newLoad(args) + + var all []build.Expr + added := false + for _, stmt := range stmts { + _, isComment := stmt.(*build.CommentBlock) + if isComment || added { + all = append(all, stmt) + continue + } + all = append(all, load) + all = append(all, stmt) + added = true + } + if !added { // Empty file or just comments. + all = append(all, load) + } + return all +} diff --git a/vendor/github.com/bazelbuild/buildtools/edit/edit_test.go b/vendor/github.com/bazelbuild/buildtools/edit/edit_test.go new file mode 100644 index 00000000000..433dcd8e41e --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/edit/edit_test.go @@ -0,0 +1,216 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ +package edit + +import ( + "reflect" + "strings" + "testing" + + "github.com/bazelbuild/buildtools/build" +) + +var parseLabelTests = []struct { + in string + repo string + pkg string + rule string +}{ + {"//devtools/buildozer:rule", "", "devtools/buildozer", "rule"}, + {"devtools/buildozer:rule", "", "devtools/buildozer", "rule"}, + {"//devtools/buildozer", "", "devtools/buildozer", "buildozer"}, + {"//base", "", "base", "base"}, + {"//base:", "", "base", "base"}, + {"@r//devtools/buildozer:rule", "r", "devtools/buildozer", "rule"}, + {"@r//devtools/buildozer", "r", "devtools/buildozer", "buildozer"}, + {"@r//base", "r", "base", "base"}, + {"@r//base:", "r", "base", "base"}, + {"@foo", "foo", "", "foo"}, + {":label", "", "", "label"}, + {"label", "", "", "label"}, + {"/abs/path/to/WORKSPACE:rule", "", "/abs/path/to/WORKSPACE", "rule"}, +} + +func TestParseLabel(t *testing.T) { + for i, tt := range parseLabelTests { + repo, pkg, rule := ParseLabel(tt.in) + if repo != tt.repo || pkg != tt.pkg || rule != tt.rule { + t.Errorf("%d. ParseLabel(%q) => (%q, %q, %q), want (%q, %q, %q)", + i, tt.in, repo, pkg, rule, tt.repo, tt.pkg, tt.rule) + } + } +} + +var shortenLabelTests = []struct { + in string + pkg string + result string +}{ + {"//devtools/buildozer:rule", "devtools/buildozer", ":rule"}, + {"//devtools/buildozer:rule", "devtools", "//devtools/buildozer:rule"}, + {"//base:rule", "devtools", "//base:rule"}, + {"//base:base", "devtools", "//base"}, + {"//base", "base", ":base"}, + {":local", "", ":local"}, + {"something else", "", "something else"}, + {"/path/to/file", "path/to", "/path/to/file"}, +} + +func TestShortenLabel(t *testing.T) { + for i, tt := range shortenLabelTests { + result := ShortenLabel(tt.in, tt.pkg) + if result != tt.result { + t.Errorf("%d. 
ShortenLabel(%q, %q) => %q, want %q", + i, tt.in, tt.pkg, result, tt.result) + } + } +} + +var labelsEqualTests = []struct { + label1 string + label2 string + pkg string + expected bool +}{ + {"//devtools/buildozer:rule", "rule", "devtools/buildozer", true}, + {"//devtools/buildozer:rule", "rule:jar", "devtools", false}, +} + +func TestLabelsEqual(t *testing.T) { + for i, tt := range labelsEqualTests { + if got := LabelsEqual(tt.label1, tt.label2, tt.pkg); got != tt.expected { + t.Errorf("%d. LabelsEqual(%q, %q, %q) => %v, want %v", + i, tt.label1, tt.label2, tt.pkg, got, tt.expected) + } + } +} + +var splitOnSpacesTests = []struct { + in string + out []string +}{ + {"a", []string{"a"}}, + {" abc def ", []string{"abc", "def"}}, + {` abc\ def `, []string{"abc def"}}, +} + +func TestSplitOnSpaces(t *testing.T) { + for i, tt := range splitOnSpacesTests { + result := SplitOnSpaces(tt.in) + if !reflect.DeepEqual(result, tt.out) { + t.Errorf("%d. SplitOnSpaces(%q) => %q, want %q", + i, tt.in, result, tt.out) + } + } +} + +func TestInsertLoad(t *testing.T) { + tests := []struct{ input, expected string }{ + {``, `load("location", "symbol")`}, + {`load("location", "symbol")`, `load("location", "symbol")`}, + {`load("location", "other", "symbol")`, `load("location", "other", "symbol")`}, + {`load("location", "other")`, `load("location", "other", "symbol")`}, + { + `load("other loc", "symbol")`, + `load("location", "symbol") +load("other loc", "symbol")`, + }, + } + + for _, tst := range tests { + bld, err := build.Parse("BUILD", []byte(tst.input)) + if err != nil { + t.Error(err) + continue + } + bld.Stmt = InsertLoad(bld.Stmt, []string{"location", "symbol"}) + got := strings.TrimSpace(string(build.Format(bld))) + if got != tst.expected { + t.Errorf("maybeInsertLoad(%s): got %s, expected %s", tst.input, got, tst.expected) + } + } +} + +func TestAddValueToListAttribute(t *testing.T) { + tests := []struct{ input, expected string }{ + {`rule(name="rule")`, `rule(name="rule", 
attr=["foo"])`}, + {`rule(name="rule", attr=["foo"])`, `rule(name="rule", attr=["foo"])`}, + {`rule(name="rule", attr=IDENT)`, `rule(name="rule", attr=IDENT+["foo"])`}, + {`rule(name="rule", attr=["foo"] + IDENT)`, `rule(name="rule", attr=["foo"] + IDENT)`}, + {`rule(name="rule", attr=["bar"] + IDENT)`, `rule(name="rule", attr=["bar", "foo"] + IDENT)`}, + {`rule(name="rule", attr=IDENT + ["foo"])`, `rule(name="rule", attr=IDENT + ["foo"])`}, + {`rule(name="rule", attr=IDENT + ["bar"])`, `rule(name="rule", attr=IDENT + ["bar", "foo"])`}, + } + + for _, tst := range tests { + bld, err := build.Parse("BUILD", []byte(tst.input)) + if err != nil { + t.Error(err) + continue + } + rule := bld.RuleAt(1) + AddValueToListAttribute(rule, "attr", "", &build.StringExpr{Value: "foo"}, nil) + got := strings.TrimSpace(string(build.Format(bld))) + + wantBld, err := build.Parse("BUILD", []byte(tst.expected)) + if err != nil { + t.Error(err) + continue + } + want := strings.TrimSpace(string(build.Format(wantBld))) + if got != want { + t.Errorf("AddValueToListAttribute(%s): got %s, expected %s", tst.input, got, want) + } + } +} + +func TestUseImplicitName(t *testing.T) { + tests := []struct { + input string + expectedRuleLine int + wantErr bool + wantRootErr bool + description string + }{ + {`rule()`, 1, false, false, `Use an implicit name for one rule.`}, + {`rule(name="a") + rule(name="b") + rule()`, 3, false, false, `Use an implicit name for the one unnamed rule`}, + {`rule() rule() rule()`, 1, true, false, `Error for multiple unnamed rules`}, + {`rule()`, 1, true, true, `Error for the root package`}, + } + + for _, tst := range tests { + path := "foo/BUILD" + if tst.wantRootErr { + path = "BUILD" + } + bld, err := build.Parse(path, []byte(tst.input)) + if err != nil { + t.Error(tst.description, err) + continue + } + got := UseImplicitName(bld, "foo") + + if !tst.wantErr { + want := bld.RuleAt(tst.expectedRuleLine) + if got.Kind() != want.Kind() || got.Name() != want.Name() { + 
t.Errorf("UseImplicitName(%s): got %s, expected %s. %s", tst.input, got, want, tst.description) + } + } else { + if got != nil { + t.Errorf("UseImplicitName(%s): got %s, expected nil. %s", tst.input, got, tst.description) + } + } + } +} diff --git a/vendor/github.com/bazelbuild/buildtools/edit/fix.go b/vendor/github.com/bazelbuild/buildtools/edit/fix.go new file mode 100644 index 00000000000..e47183fe5d6 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/edit/fix.go @@ -0,0 +1,569 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Functions to clean and fix BUILD files + +package edit + +import ( + "regexp" + "sort" + "strings" + + "github.com/bazelbuild/buildtools/build" +) + +// splitOptionsWithSpaces is a cleanup function. +// It splits options strings that contain a space. This change +// should be safe as Blaze is splitting those strings, but we will +// eventually get rid of this misfeature. +// eg. 
it converts from: +// copts = ["-Dfoo -Dbar"] +// to: +// copts = ["-Dfoo", "-Dbar"] +func splitOptionsWithSpaces(_ *build.File, r *build.Rule, _ string) bool { + var attrToRewrite = []string{ + "copts", + "linkopts", + } + fixed := false + for _, attrName := range attrToRewrite { + attr := r.Attr(attrName) + if attr != nil { + for _, li := range AllLists(attr) { + fixed = splitStrings(li) || fixed + } + } + } + return fixed +} + +func splitStrings(list *build.ListExpr) bool { + var all []build.Expr + fixed := false + for _, e := range list.List { + str, ok := e.(*build.StringExpr) + if !ok { + all = append(all, e) + continue + } + if strings.Contains(str.Value, " ") && !strings.Contains(str.Value, "'\"") { + fixed = true + for i, substr := range strings.Fields(str.Value) { + item := &build.StringExpr{Value: substr} + if i == 0 { + item.Comments = str.Comments + } + all = append(all, item) + } + } else { + all = append(all, str) + } + } + list.List = all + return fixed +} + +// shortenLabels rewrites the labels in the rule using the short notation. +func shortenLabels(_ *build.File, r *build.Rule, pkg string) bool { + fixed := false + for _, attr := range r.AttrKeys() { + e := r.Attr(attr) + if !ContainsLabels(attr) { + continue + } + for _, li := range AllLists(e) { + for _, elem := range li.List { + str, ok := elem.(*build.StringExpr) + if ok && str.Value != ShortenLabel(str.Value, pkg) { + str.Value = ShortenLabel(str.Value, pkg) + fixed = true + } + } + } + } + return fixed +} + +// removeVisibility removes useless visibility attributes. +func removeVisibility(f *build.File, r *build.Rule, pkg string) bool { + pkgDecl := PackageDeclaration(f) + defaultVisibility := pkgDecl.AttrStrings("default_visibility") + + // If no default_visibility is given, it is implicitly private. 
+ if len(defaultVisibility) == 0 { + defaultVisibility = []string{"//visibility:private"} + } + + visibility := r.AttrStrings("visibility") + if len(visibility) == 0 || len(visibility) != len(defaultVisibility) { + return false + } + sort.Strings(defaultVisibility) + sort.Strings(visibility) + for i, vis := range visibility { + if vis != defaultVisibility[i] { + return false + } + } + r.DelAttr("visibility") + return true +} + +// removeTestOnly removes the useless testonly attributes. +func removeTestOnly(f *build.File, r *build.Rule, pkg string) bool { + pkgDecl := PackageDeclaration(f) + + def := strings.HasSuffix(r.Kind(), "_test") || r.Kind() == "test_suite" + if !def { + if pkgDecl.Attr("default_testonly") == nil { + def = strings.HasPrefix(pkg, "javatests/") + } else if pkgDecl.AttrLiteral("default_testonly") == "1" { + def = true + } else if pkgDecl.AttrLiteral("default_testonly") != "0" { + // Non-literal value: it's not safe to do a change. + return false + } + } + + testonly := r.AttrLiteral("testonly") + if def && testonly == "1" { + r.DelAttr("testonly") + return true + } + if !def && testonly == "0" { + r.DelAttr("testonly") + return true + } + return false +} + +func genruleRenameDepsTools(_ *build.File, r *build.Rule, _ string) bool { + return r.Kind() == "genrule" && RenameAttribute(r, "deps", "tools") == nil +} + +// explicitHeuristicLabels adds $(location ...) for each label in the string s. +func explicitHeuristicLabels(s string, labels map[string]bool) string { + // Regexp comes from LABEL_CHAR_MATCHER in + // java/com/google/devtools/build/lib/analysis/LabelExpander.java + re := regexp.MustCompile("[a-zA-Z0-9:/_.+-]+|[^a-zA-Z0-9:/_.+-]+") + parts := re.FindAllString(s, -1) + changed := false + canChange := true + for i, part := range parts { + // We don't want to add $(location when it's already present. + // So we skip the next label when we see location(s). 
+ if part == "location" || part == "locations" { + canChange = false + } + if !labels[part] { + if labels[":"+part] { // leading colon is often missing + part = ":" + part + } else { + continue + } + } + + if !canChange { + canChange = true + continue + } + parts[i] = "$(location " + part + ")" + changed = true + } + if changed { + return strings.Join(parts, "") + } + return s +} + +func addLabels(r *build.Rule, attr string, labels map[string]bool) { + a := r.Attr(attr) + if a == nil { + return + } + for _, li := range AllLists(a) { + for _, item := range li.List { + if str, ok := item.(*build.StringExpr); ok { + labels[str.Value] = true + } + } + } +} + +// genruleFixHeuristicLabels modifies the cmd attribute of genrules, so +// that they don't rely on heuristic label expansion anymore. +// Label expansion is made explicit with the $(location ...) command. +func genruleFixHeuristicLabels(_ *build.File, r *build.Rule, _ string) bool { + if r.Kind() != "genrule" { + return false + } + + cmd := r.Attr("cmd") + if cmd == nil { + return false + } + labels := make(map[string]bool) + addLabels(r, "tools", labels) + addLabels(r, "srcs", labels) + + fixed := false + for _, str := range AllStrings(cmd) { + newVal := explicitHeuristicLabels(str.Value, labels) + if newVal != str.Value { + fixed = true + str.Value = newVal + } + } + return fixed +} + +// sortExportsFiles sorts the first argument of exports_files if it is a list. +func sortExportsFiles(_ *build.File, r *build.Rule, _ string) bool { + if r.Kind() != "exports_files" || len(r.Call.List) == 0 { + return false + } + build.SortStringList(r.Call.List[0]) + return true +} + +// removeVarref replaces all varref('x') with '$(x)'. +// The goal is to eventually remove varref from the build language. 
+func removeVarref(_ *build.File, r *build.Rule, _ string) bool { + fixed := false + EditFunction(r.Call, "varref", func(call *build.CallExpr, stk []build.Expr) build.Expr { + if len(call.List) != 1 { + return nil + } + str, ok := (call.List[0]).(*build.StringExpr) + if !ok { + return nil + } + fixed = true + str.Value = "$(" + str.Value + ")" + // Preserve suffix comments from the function call + str.Comment().Suffix = append(str.Comment().Suffix, call.Comment().Suffix...) + return str + }) + return fixed +} + +// sortGlob sorts the list argument to glob. +func sortGlob(_ *build.File, r *build.Rule, _ string) bool { + fixed := false + EditFunction(r.Call, "glob", func(call *build.CallExpr, stk []build.Expr) build.Expr { + if len(call.List) == 0 { + return nil + } + build.SortStringList(call.List[0]) + fixed = true + return call + }) + return fixed +} + +func evaluateListConcatenation(expr build.Expr) build.Expr { + if _, ok := expr.(*build.ListExpr); ok { + return expr + } + bin, ok := expr.(*build.BinaryExpr) + if !ok || bin.Op != "+" { + return expr + } + li1, ok1 := evaluateListConcatenation(bin.X).(*build.ListExpr) + li2, ok2 := evaluateListConcatenation(bin.Y).(*build.ListExpr) + if !ok1 || !ok2 { + return expr + } + res := *li1 + res.List = append(li1.List, li2.List...) + return &res +} + +// mergeLiteralLists evaluates the concatenation of two literal lists. +// e.g. [1, 2] + [3, 4] -> [1, 2, 3, 4] +func mergeLiteralLists(_ *build.File, r *build.Rule, _ string) bool { + fixed := false + build.Edit(r.Call, func(expr build.Expr, stk []build.Expr) build.Expr { + newexpr := evaluateListConcatenation(expr) + fixed = fixed || (newexpr != expr) + return newexpr + }) + return fixed +} + +// usePlusEqual replaces uses of extend and append with the += operator. +// e.g. 
foo.extend(bar) => foo += bar +// foo.append(bar) => foo += [bar] +func usePlusEqual(f *build.File) bool { + fixed := false + for i, stmt := range f.Stmt { + call, ok := stmt.(*build.CallExpr) + if !ok { + continue + } + dot, ok := call.X.(*build.DotExpr) + if !ok || len(call.List) != 1 { + continue + } + obj, ok := dot.X.(*build.LiteralExpr) + if !ok { + continue + } + + var fix *build.BinaryExpr + if dot.Name == "extend" { + fix = &build.BinaryExpr{X: obj, Op: "+=", Y: call.List[0]} + } else if dot.Name == "append" { + list := &build.ListExpr{List: []build.Expr{call.List[0]}} + fix = &build.BinaryExpr{X: obj, Op: "+=", Y: list} + } else { + continue + } + fix.Comments = call.Comments // Keep original comments + f.Stmt[i] = fix + fixed = true + } + return fixed +} + +func isNonemptyComment(comment *build.Comments) bool { + return len(comment.Before)+len(comment.Suffix)+len(comment.After) > 0 +} + +// Checks whether a call or any of its arguments have a comment +func hasComment(call *build.CallExpr) bool { + if isNonemptyComment(call.Comment()) { + return true + } + for _, arg := range call.List { + if isNonemptyComment(arg.Comment()) { + return true + } + } + return false +} + +// cleanUnusedLoads removes symbols from load statements that are not used in the file. +// It also cleans symbols loaded multiple times, sorts symbol list, and removes load +// statements when the list is empty. +func cleanUnusedLoads(f *build.File) bool { + // If the file needs preprocessing, leave it alone. 
+ for _, stmt := range f.Stmt { + if _, ok := stmt.(*build.PythonBlock); ok { + return false + } + } + symbols := UsedSymbols(f) + fixed := false + + var all []build.Expr + for _, stmt := range f.Stmt { + rule, ok := ExprToRule(stmt, "load") + if !ok || len(rule.Call.List) == 0 || hasComment(rule.Call) { + all = append(all, stmt) + continue + } + var args []build.Expr + for _, arg := range rule.Call.List[1:] { // first argument is the path, we keep it + symbol, ok := loadedSymbol(arg) + if !ok || symbols[symbol] { + args = append(args, arg) + if ok { + // If the same symbol is loaded twice, we'll remove it. + delete(symbols, symbol) + } + } else { + fixed = true + } + } + if len(args) > 0 { // Keep the load statement if it loads at least one symbol. + li := &build.ListExpr{List: args} + build.SortStringList(li) + rule.Call.List = append(rule.Call.List[:1], li.List...) + all = append(all, rule.Call) + } else { + fixed = true + } + } + f.Stmt = all + return fixed +} + +// loadedSymbol parses the symbol token from a load statement argument, +// supporting aliases. +func loadedSymbol(arg build.Expr) (string, bool) { + symbol, ok := arg.(*build.StringExpr) + if ok { + return symbol.Value, ok + } + // try an aliased symbol + if binExpr, ok := arg.(*build.BinaryExpr); ok && binExpr.Op == "=" { + if keyExpr, ok := binExpr.X.(*build.LiteralExpr); ok { + return keyExpr.Token, ok + } + } + return "", false +} + +// movePackageDeclarationToTheTop ensures that the call to package() is done +// before everything else (except comments). +func movePackageDeclarationToTheTop(f *build.File) bool { + pkg := ExistingPackageDeclaration(f) + if pkg == nil { + return false + } + all := []build.Expr{} + inserted := false // true when the package declaration has been inserted + for _, stmt := range f.Stmt { + _, isComment := stmt.(*build.CommentBlock) + _, isBinaryExpr := stmt.(*build.BinaryExpr) // e.g. 
variable declaration + _, isLoad := ExprToRule(stmt, "load") + if isComment || isBinaryExpr || isLoad { + all = append(all, stmt) + continue + } + if stmt == pkg.Call { + if inserted { + // remove the old package + continue + } + return false // the file was ok + } + if !inserted { + all = append(all, pkg.Call) + inserted = true + } + all = append(all, stmt) + } + f.Stmt = all + return true +} + +// moveToPackage is an auxilliary function used by moveLicensesAndDistribs. +// The function shouldn't appear more than once in the file (depot cleanup has +// been done). +func moveToPackage(f *build.File, attrname string) bool { + var all []build.Expr + fixed := false + for _, stmt := range f.Stmt { + rule, ok := ExprToRule(stmt, attrname) + if !ok || len(rule.Call.List) != 1 { + all = append(all, stmt) + continue + } + pkgDecl := PackageDeclaration(f) + pkgDecl.SetAttr(attrname, rule.Call.List[0]) + pkgDecl.AttrDefn(attrname).Comments = *stmt.Comment() + fixed = true + } + f.Stmt = all + return fixed +} + +// moveLicensesAndDistribs replaces the 'licenses' and 'distribs' functions +// with an attribute in package. +// Before: licenses(["notice"]) +// After: package(licenses = ["notice"]) +func moveLicensesAndDistribs(f *build.File) bool { + fixed1 := moveToPackage(f, "licenses") + fixed2 := moveToPackage(f, "distribs") + return fixed1 || fixed2 +} + +// AllRuleFixes is a list of all Buildozer fixes that can be applied on a rule. 
+var AllRuleFixes = []struct { + Name string + Fn func(file *build.File, rule *build.Rule, pkg string) bool + Message string +}{ + {"sortGlob", sortGlob, + "Sort the list in a call to glob"}, + {"splitOptions", splitOptionsWithSpaces, + "Each option should be given separately in the list"}, + {"shortenLabels", shortenLabels, + "Style: Use the canonical label notation"}, + {"removeVisibility", removeVisibility, + "This visibility attribute is useless (it corresponds to the default value)"}, + {"removeTestOnly", removeTestOnly, + "This testonly attribute is useless (it corresponds to the default value)"}, + {"genruleRenameDepsTools", genruleRenameDepsTools, + "'deps' attribute in genrule has been renamed 'tools'"}, + {"genruleFixHeuristicLabels", genruleFixHeuristicLabels, + "$(location) should be called explicitely"}, + {"sortExportsFiles", sortExportsFiles, + "Files in exports_files should be sorted"}, + {"varref", removeVarref, + "All varref('foo') should be replaced with '$foo'"}, + {"mergeLiteralLists", mergeLiteralLists, + "Remove useless list concatenation"}, +} + +// FileLevelFixes is a list of all Buildozer fixes that apply on the whole file. +var FileLevelFixes = []struct { + Name string + Fn func(file *build.File) bool + Message string +}{ + {"movePackageToTop", movePackageDeclarationToTheTop, + "The package declaration should be the first rule in a file"}, + {"usePlusEqual", usePlusEqual, + "Prefer '+=' over 'extend' or 'append'"}, + {"unusedLoads", cleanUnusedLoads, + "Remove unused symbols from load statements"}, + {"moveLicensesAndDistribs", moveLicensesAndDistribs, + "Move licenses and distribs to the package function"}, +} + +// FixRule aims to fix errors in BUILD files, remove deprecated features, and +// simplify the code. 
+func FixRule(f *build.File, pkg string, rule *build.Rule, fixes []string) *build.File { + fixesAsMap := make(map[string]bool) + for _, fix := range fixes { + fixesAsMap[fix] = true + } + fixed := false + for _, fix := range AllRuleFixes { + if len(fixes) == 0 || fixesAsMap[fix.Name] { + fixed = fix.Fn(f, rule, pkg) || fixed + } + } + if !fixed { + return nil + } + return f +} + +// FixFile fixes everything it can in the BUILD file. +func FixFile(f *build.File, pkg string, fixes []string) *build.File { + fixesAsMap := make(map[string]bool) + for _, fix := range fixes { + fixesAsMap[fix] = true + } + fixed := false + for _, rule := range f.Rules("") { + res := FixRule(f, pkg, rule, fixes) + if res != nil { + fixed = true + f = res + } + } + for _, fix := range FileLevelFixes { + if len(fixes) == 0 || fixesAsMap[fix.Name] { + fixed = fix.Fn(f) || fixed + } + } + if !fixed { + return nil + } + return f +} diff --git a/vendor/github.com/bazelbuild/buildtools/edit/types.go b/vendor/github.com/bazelbuild/buildtools/edit/types.go new file mode 100644 index 00000000000..3c05133233e --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/edit/types.go @@ -0,0 +1,69 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Type information for attributes. 
+ +package edit + +import ( + buildpb "github.com/bazelbuild/buildtools/build_proto" + "github.com/bazelbuild/buildtools/lang" + "github.com/bazelbuild/buildtools/tables" +) + +var typeOf = lang.TypeOf + +// IsList returns true for all attributes whose type is a list. +func IsList(attr string) bool { + overrideValue, isOverridden := tables.IsListArg[attr] + if (isOverridden) { + return overrideValue + } + // It stands to reason that a sortable list must be a list. + isSortableList := tables.IsSortableListArg[attr] + if (isSortableList) { + return true + } + ty := typeOf[attr] + return ty == buildpb.Attribute_STRING_LIST || + ty == buildpb.Attribute_LABEL_LIST || + ty == buildpb.Attribute_OUTPUT_LIST || + ty == buildpb.Attribute_FILESET_ENTRY_LIST || + ty == buildpb.Attribute_INTEGER_LIST || + ty == buildpb.Attribute_LICENSE || + ty == buildpb.Attribute_DISTRIBUTION_SET +} + +// IsIntList returns true for all attributes whose type is an int list. +func IsIntList(attr string) bool { + return typeOf[attr] == buildpb.Attribute_INTEGER_LIST +} + +// IsString returns true for all attributes whose type is a string or a label. +func IsString(attr string) bool { + ty := typeOf[attr] + return ty == buildpb.Attribute_LABEL || + ty == buildpb.Attribute_STRING || + ty == buildpb.Attribute_OUTPUT +} + +// IsStringDict returns true for all attributes whose type is a string dictionary. +func IsStringDict(attr string) bool { + return typeOf[attr] == buildpb.Attribute_STRING_DICT +} + +// ContainsLabels returns true for all attributes whose type is a label or a label list. 
+func ContainsLabels(attr string) bool { + ty := typeOf[attr] + return ty == buildpb.Attribute_LABEL_LIST || + ty == buildpb.Attribute_LABEL +} diff --git a/vendor/github.com/bazelbuild/buildtools/extra_actions_base_proto/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/extra_actions_base_proto/BUILD.bazel new file mode 100644 index 00000000000..c1eda41db6a --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/extra_actions_base_proto/BUILD.bazel @@ -0,0 +1,21 @@ +load("@io_bazel_rules_go//proto:go_proto_library.bzl", "go_proto_library") +load("//build:build_defs.bzl", "genfile_check_test") + +genfile_check_test( + src = "extra_actions_base.gen.pb.go", + gen = "extra_actions_base.pb.go", +) + +genrule( + name = "copy_and_fix", + srcs = ["@io_bazel//src/main/protobuf:srcs"], + outs = ["extra_actions_base.proto"], + cmd = ("SRCS=($(locations @io_bazel//src/main/protobuf:srcs));" + + "cp $$(dirname $$SRCS)/extra_actions_base.proto $@"), +) + +go_proto_library( + name = "go_default_library", + srcs = ["extra_actions_base.proto"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/extra_actions_base_proto/extra_actions_base.gen.pb.go b/vendor/github.com/bazelbuild/buildtools/extra_actions_base_proto/extra_actions_base.gen.pb.go new file mode 100755 index 00000000000..23a3819ba50 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/extra_actions_base_proto/extra_actions_base.gen.pb.go @@ -0,0 +1,696 @@ +// Code generated by protoc-gen-go. +// source: extra_actions_base_proto/extra_actions_base.proto +// DO NOT EDIT! + +/* +Package blaze is a generated protocol buffer package. 
+ +It is generated from these files: + extra_actions_base_proto/extra_actions_base.proto + +It has these top-level messages: + ExtraActionSummary + DetailedExtraActionInfo + ExtraActionInfo + EnvironmentVariable + SpawnInfo + CppCompileInfo + CppLinkInfo + JavaCompileInfo + PythonInfo +*/ +package blaze + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A list of extra actions and metadata for the print_action command. +type ExtraActionSummary struct { + Action []*DetailedExtraActionInfo `protobuf:"bytes,1,rep,name=action" json:"action,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtraActionSummary) Reset() { *m = ExtraActionSummary{} } +func (m *ExtraActionSummary) String() string { return proto.CompactTextString(m) } +func (*ExtraActionSummary) ProtoMessage() {} +func (*ExtraActionSummary) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *ExtraActionSummary) GetAction() []*DetailedExtraActionInfo { + if m != nil { + return m.Action + } + return nil +} + +// An individual action printed by the print_action command. +type DetailedExtraActionInfo struct { + // If the given action was included in the output due to a request for a + // specific file, then this field contains the name of that file so that the + // caller can correctly associate the extra action with that file. 
+ // + // The data in this message is currently not sufficient to run the action on a + // production machine, because not all necessary input files are identified, + // especially for C++. + // + // There is no easy way to fix this; we could require that all header files + // are declared and then add all of them here (which would be a huge superset + // of the files that are actually required), or we could run the include + // scanner and add those files here. + TriggeringFile *string `protobuf:"bytes,1,opt,name=triggering_file,json=triggeringFile" json:"triggering_file,omitempty"` + // The actual action. + Action *ExtraActionInfo `protobuf:"bytes,2,req,name=action" json:"action,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *DetailedExtraActionInfo) Reset() { *m = DetailedExtraActionInfo{} } +func (m *DetailedExtraActionInfo) String() string { return proto.CompactTextString(m) } +func (*DetailedExtraActionInfo) ProtoMessage() {} +func (*DetailedExtraActionInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *DetailedExtraActionInfo) GetTriggeringFile() string { + if m != nil && m.TriggeringFile != nil { + return *m.TriggeringFile + } + return "" +} + +func (m *DetailedExtraActionInfo) GetAction() *ExtraActionInfo { + if m != nil { + return m.Action + } + return nil +} + +// Provides information to an extra_action on the original action it is +// shadowing. +type ExtraActionInfo struct { + // The label of the ActionOwner of the shadowed action. + Owner *string `protobuf:"bytes,1,opt,name=owner" json:"owner,omitempty"` + // Only set if the owner is an Aspect. + // Corresponds to AspectValue.AspectKey.getAspectClass.getName() + // This field is deprecated as there might now be + // multiple aspects applied to the same target. + // This is the aspect name of the last aspect + // in 'aspects' (8) field. 
+ AspectName *string `protobuf:"bytes,6,opt,name=aspect_name,json=aspectName" json:"aspect_name,omitempty"` + // Only set if the owner is an Aspect. + // Corresponds to AspectValue.AspectKey.getParameters() + // This field is deprecated as there might now be + // multiple aspects applied to the same target. + // These are the aspect parameters of the last aspect + // in 'aspects' (8) field. + AspectParameters map[string]*ExtraActionInfo_StringList `protobuf:"bytes,7,rep,name=aspect_parameters,json=aspectParameters" json:"aspect_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // If the owner is an aspect, all aspects applied to the target + Aspects []*ExtraActionInfo_AspectDescriptor `protobuf:"bytes,8,rep,name=aspects" json:"aspects,omitempty"` + // An id uniquely describing the shadowed action at the ActionOwner level. + Id *string `protobuf:"bytes,2,opt,name=id" json:"id,omitempty"` + // The mnemonic of the shadowed action. Used to distinguish actions with the + // same ActionType. 
+ Mnemonic *string `protobuf:"bytes,5,opt,name=mnemonic" json:"mnemonic,omitempty"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtraActionInfo) Reset() { *m = ExtraActionInfo{} } +func (m *ExtraActionInfo) String() string { return proto.CompactTextString(m) } +func (*ExtraActionInfo) ProtoMessage() {} +func (*ExtraActionInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +var extRange_ExtraActionInfo = []proto.ExtensionRange{ + {1000, 536870911}, +} + +func (*ExtraActionInfo) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ExtraActionInfo +} + +func (m *ExtraActionInfo) GetOwner() string { + if m != nil && m.Owner != nil { + return *m.Owner + } + return "" +} + +func (m *ExtraActionInfo) GetAspectName() string { + if m != nil && m.AspectName != nil { + return *m.AspectName + } + return "" +} + +func (m *ExtraActionInfo) GetAspectParameters() map[string]*ExtraActionInfo_StringList { + if m != nil { + return m.AspectParameters + } + return nil +} + +func (m *ExtraActionInfo) GetAspects() []*ExtraActionInfo_AspectDescriptor { + if m != nil { + return m.Aspects + } + return nil +} + +func (m *ExtraActionInfo) GetId() string { + if m != nil && m.Id != nil { + return *m.Id + } + return "" +} + +func (m *ExtraActionInfo) GetMnemonic() string { + if m != nil && m.Mnemonic != nil { + return *m.Mnemonic + } + return "" +} + +type ExtraActionInfo_StringList struct { + Value []string `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtraActionInfo_StringList) Reset() { *m = ExtraActionInfo_StringList{} } +func (m *ExtraActionInfo_StringList) String() string { return proto.CompactTextString(m) } +func (*ExtraActionInfo_StringList) ProtoMessage() {} +func (*ExtraActionInfo_StringList) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 1} } + +func (m *ExtraActionInfo_StringList) GetValue() []string { + if m != nil { 
+ return m.Value + } + return nil +} + +type ExtraActionInfo_AspectDescriptor struct { + // Corresponds to AspectDescriptor.getName() + AspectName *string `protobuf:"bytes,1,opt,name=aspect_name,json=aspectName" json:"aspect_name,omitempty"` + // Corresponds to AspectDescriptor.getParameters() + AspectParameters map[string]*ExtraActionInfo_AspectDescriptor_StringList `protobuf:"bytes,2,rep,name=aspect_parameters,json=aspectParameters" json:"aspect_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtraActionInfo_AspectDescriptor) Reset() { *m = ExtraActionInfo_AspectDescriptor{} } +func (m *ExtraActionInfo_AspectDescriptor) String() string { return proto.CompactTextString(m) } +func (*ExtraActionInfo_AspectDescriptor) ProtoMessage() {} +func (*ExtraActionInfo_AspectDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{2, 2} +} + +func (m *ExtraActionInfo_AspectDescriptor) GetAspectName() string { + if m != nil && m.AspectName != nil { + return *m.AspectName + } + return "" +} + +func (m *ExtraActionInfo_AspectDescriptor) GetAspectParameters() map[string]*ExtraActionInfo_AspectDescriptor_StringList { + if m != nil { + return m.AspectParameters + } + return nil +} + +type ExtraActionInfo_AspectDescriptor_StringList struct { + Value []string `protobuf:"bytes,1,rep,name=value" json:"value,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *ExtraActionInfo_AspectDescriptor_StringList) Reset() { + *m = ExtraActionInfo_AspectDescriptor_StringList{} +} +func (m *ExtraActionInfo_AspectDescriptor_StringList) String() string { + return proto.CompactTextString(m) +} +func (*ExtraActionInfo_AspectDescriptor_StringList) ProtoMessage() {} +func (*ExtraActionInfo_AspectDescriptor_StringList) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{2, 2, 1} +} + +func (m *ExtraActionInfo_AspectDescriptor_StringList) GetValue() []string { + if m 
!= nil { + return m.Value + } + return nil +} + +type EnvironmentVariable struct { + // It is possible that this name is not a valid variable identifier. + Name *string `protobuf:"bytes,1,req,name=name" json:"name,omitempty"` + // The value is unescaped and unquoted. + Value *string `protobuf:"bytes,2,req,name=value" json:"value,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *EnvironmentVariable) Reset() { *m = EnvironmentVariable{} } +func (m *EnvironmentVariable) String() string { return proto.CompactTextString(m) } +func (*EnvironmentVariable) ProtoMessage() {} +func (*EnvironmentVariable) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *EnvironmentVariable) GetName() string { + if m != nil && m.Name != nil { + return *m.Name + } + return "" +} + +func (m *EnvironmentVariable) GetValue() string { + if m != nil && m.Value != nil { + return *m.Value + } + return "" +} + +// Provides access to data that is specific to spawn actions. +// Usually provided by actions using the "Spawn" & "Genrule" Mnemonics. +type SpawnInfo struct { + Argument []string `protobuf:"bytes,1,rep,name=argument" json:"argument,omitempty"` + // A list of environment variables and their values. No order is enforced. 
+ Variable []*EnvironmentVariable `protobuf:"bytes,2,rep,name=variable" json:"variable,omitempty"` + InputFile []string `protobuf:"bytes,4,rep,name=input_file,json=inputFile" json:"input_file,omitempty"` + OutputFile []string `protobuf:"bytes,5,rep,name=output_file,json=outputFile" json:"output_file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *SpawnInfo) Reset() { *m = SpawnInfo{} } +func (m *SpawnInfo) String() string { return proto.CompactTextString(m) } +func (*SpawnInfo) ProtoMessage() {} +func (*SpawnInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *SpawnInfo) GetArgument() []string { + if m != nil { + return m.Argument + } + return nil +} + +func (m *SpawnInfo) GetVariable() []*EnvironmentVariable { + if m != nil { + return m.Variable + } + return nil +} + +func (m *SpawnInfo) GetInputFile() []string { + if m != nil { + return m.InputFile + } + return nil +} + +func (m *SpawnInfo) GetOutputFile() []string { + if m != nil { + return m.OutputFile + } + return nil +} + +var E_SpawnInfo_SpawnInfo = &proto.ExtensionDesc{ + ExtendedType: (*ExtraActionInfo)(nil), + ExtensionType: (*SpawnInfo)(nil), + Field: 1003, + Name: "blaze.SpawnInfo.spawn_info", + Tag: "bytes,1003,opt,name=spawn_info,json=spawnInfo", + Filename: "extra_actions_base_proto/extra_actions_base.proto", +} + +// Provides access to data that is specific to C++ compile actions. +// Usually provided by actions using the "CppCompile" Mnemonic. 
+type CppCompileInfo struct { + Tool *string `protobuf:"bytes,1,opt,name=tool" json:"tool,omitempty"` + CompilerOption []string `protobuf:"bytes,2,rep,name=compiler_option,json=compilerOption" json:"compiler_option,omitempty"` + SourceFile *string `protobuf:"bytes,3,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` + OutputFile *string `protobuf:"bytes,4,opt,name=output_file,json=outputFile" json:"output_file,omitempty"` + // Due to header discovery, this won't include headers unless the build is + // actually performed. If set, this field will include the value of + // "source_file" in addition to the headers. + SourcesAndHeaders []string `protobuf:"bytes,5,rep,name=sources_and_headers,json=sourcesAndHeaders" json:"sources_and_headers,omitempty"` + // A list of environment variables and their values. No order is enforced. + Variable []*EnvironmentVariable `protobuf:"bytes,6,rep,name=variable" json:"variable,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CppCompileInfo) Reset() { *m = CppCompileInfo{} } +func (m *CppCompileInfo) String() string { return proto.CompactTextString(m) } +func (*CppCompileInfo) ProtoMessage() {} +func (*CppCompileInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *CppCompileInfo) GetTool() string { + if m != nil && m.Tool != nil { + return *m.Tool + } + return "" +} + +func (m *CppCompileInfo) GetCompilerOption() []string { + if m != nil { + return m.CompilerOption + } + return nil +} + +func (m *CppCompileInfo) GetSourceFile() string { + if m != nil && m.SourceFile != nil { + return *m.SourceFile + } + return "" +} + +func (m *CppCompileInfo) GetOutputFile() string { + if m != nil && m.OutputFile != nil { + return *m.OutputFile + } + return "" +} + +func (m *CppCompileInfo) GetSourcesAndHeaders() []string { + if m != nil { + return m.SourcesAndHeaders + } + return nil +} + +func (m *CppCompileInfo) GetVariable() []*EnvironmentVariable { + if m != nil { + return 
m.Variable + } + return nil +} + +var E_CppCompileInfo_CppCompileInfo = &proto.ExtensionDesc{ + ExtendedType: (*ExtraActionInfo)(nil), + ExtensionType: (*CppCompileInfo)(nil), + Field: 1001, + Name: "blaze.CppCompileInfo.cpp_compile_info", + Tag: "bytes,1001,opt,name=cpp_compile_info,json=cppCompileInfo", + Filename: "extra_actions_base_proto/extra_actions_base.proto", +} + +// Provides access to data that is specific to C++ link actions. +// Usually provided by actions using the "CppLink" Mnemonic. +type CppLinkInfo struct { + InputFile []string `protobuf:"bytes,1,rep,name=input_file,json=inputFile" json:"input_file,omitempty"` + OutputFile *string `protobuf:"bytes,2,opt,name=output_file,json=outputFile" json:"output_file,omitempty"` + InterfaceOutputFile *string `protobuf:"bytes,3,opt,name=interface_output_file,json=interfaceOutputFile" json:"interface_output_file,omitempty"` + LinkTargetType *string `protobuf:"bytes,4,opt,name=link_target_type,json=linkTargetType" json:"link_target_type,omitempty"` + LinkStaticness *string `protobuf:"bytes,5,opt,name=link_staticness,json=linkStaticness" json:"link_staticness,omitempty"` + LinkStamp []string `protobuf:"bytes,6,rep,name=link_stamp,json=linkStamp" json:"link_stamp,omitempty"` + BuildInfoHeaderArtifact []string `protobuf:"bytes,7,rep,name=build_info_header_artifact,json=buildInfoHeaderArtifact" json:"build_info_header_artifact,omitempty"` + // The list of command line options used for running the linking tool. 
+ LinkOpt []string `protobuf:"bytes,8,rep,name=link_opt,json=linkOpt" json:"link_opt,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *CppLinkInfo) Reset() { *m = CppLinkInfo{} } +func (m *CppLinkInfo) String() string { return proto.CompactTextString(m) } +func (*CppLinkInfo) ProtoMessage() {} +func (*CppLinkInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} } + +func (m *CppLinkInfo) GetInputFile() []string { + if m != nil { + return m.InputFile + } + return nil +} + +func (m *CppLinkInfo) GetOutputFile() string { + if m != nil && m.OutputFile != nil { + return *m.OutputFile + } + return "" +} + +func (m *CppLinkInfo) GetInterfaceOutputFile() string { + if m != nil && m.InterfaceOutputFile != nil { + return *m.InterfaceOutputFile + } + return "" +} + +func (m *CppLinkInfo) GetLinkTargetType() string { + if m != nil && m.LinkTargetType != nil { + return *m.LinkTargetType + } + return "" +} + +func (m *CppLinkInfo) GetLinkStaticness() string { + if m != nil && m.LinkStaticness != nil { + return *m.LinkStaticness + } + return "" +} + +func (m *CppLinkInfo) GetLinkStamp() []string { + if m != nil { + return m.LinkStamp + } + return nil +} + +func (m *CppLinkInfo) GetBuildInfoHeaderArtifact() []string { + if m != nil { + return m.BuildInfoHeaderArtifact + } + return nil +} + +func (m *CppLinkInfo) GetLinkOpt() []string { + if m != nil { + return m.LinkOpt + } + return nil +} + +var E_CppLinkInfo_CppLinkInfo = &proto.ExtensionDesc{ + ExtendedType: (*ExtraActionInfo)(nil), + ExtensionType: (*CppLinkInfo)(nil), + Field: 1002, + Name: "blaze.CppLinkInfo.cpp_link_info", + Tag: "bytes,1002,opt,name=cpp_link_info,json=cppLinkInfo", + Filename: "extra_actions_base_proto/extra_actions_base.proto", +} + +// Provides access to data that is specific to java compile actions. +// Usually provided by actions using the "Javac" Mnemonic. 
+type JavaCompileInfo struct { + Outputjar *string `protobuf:"bytes,1,opt,name=outputjar" json:"outputjar,omitempty"` + Classpath []string `protobuf:"bytes,2,rep,name=classpath" json:"classpath,omitempty"` + Sourcepath []string `protobuf:"bytes,3,rep,name=sourcepath" json:"sourcepath,omitempty"` + SourceFile []string `protobuf:"bytes,4,rep,name=source_file,json=sourceFile" json:"source_file,omitempty"` + JavacOpt []string `protobuf:"bytes,5,rep,name=javac_opt,json=javacOpt" json:"javac_opt,omitempty"` + Processor []string `protobuf:"bytes,6,rep,name=processor" json:"processor,omitempty"` + Processorpath []string `protobuf:"bytes,7,rep,name=processorpath" json:"processorpath,omitempty"` + Bootclasspath []string `protobuf:"bytes,8,rep,name=bootclasspath" json:"bootclasspath,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *JavaCompileInfo) Reset() { *m = JavaCompileInfo{} } +func (m *JavaCompileInfo) String() string { return proto.CompactTextString(m) } +func (*JavaCompileInfo) ProtoMessage() {} +func (*JavaCompileInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} } + +func (m *JavaCompileInfo) GetOutputjar() string { + if m != nil && m.Outputjar != nil { + return *m.Outputjar + } + return "" +} + +func (m *JavaCompileInfo) GetClasspath() []string { + if m != nil { + return m.Classpath + } + return nil +} + +func (m *JavaCompileInfo) GetSourcepath() []string { + if m != nil { + return m.Sourcepath + } + return nil +} + +func (m *JavaCompileInfo) GetSourceFile() []string { + if m != nil { + return m.SourceFile + } + return nil +} + +func (m *JavaCompileInfo) GetJavacOpt() []string { + if m != nil { + return m.JavacOpt + } + return nil +} + +func (m *JavaCompileInfo) GetProcessor() []string { + if m != nil { + return m.Processor + } + return nil +} + +func (m *JavaCompileInfo) GetProcessorpath() []string { + if m != nil { + return m.Processorpath + } + return nil +} + +func (m *JavaCompileInfo) GetBootclasspath() []string { + if m != 
nil { + return m.Bootclasspath + } + return nil +} + +var E_JavaCompileInfo_JavaCompileInfo = &proto.ExtensionDesc{ + ExtendedType: (*ExtraActionInfo)(nil), + ExtensionType: (*JavaCompileInfo)(nil), + Field: 1000, + Name: "blaze.JavaCompileInfo.java_compile_info", + Tag: "bytes,1000,opt,name=java_compile_info,json=javaCompileInfo", + Filename: "extra_actions_base_proto/extra_actions_base.proto", +} + +// Provides access to data that is specific to python rules. +// Usually provided by actions using the "Python" Mnemonic. +type PythonInfo struct { + SourceFile []string `protobuf:"bytes,1,rep,name=source_file,json=sourceFile" json:"source_file,omitempty"` + DepFile []string `protobuf:"bytes,2,rep,name=dep_file,json=depFile" json:"dep_file,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *PythonInfo) Reset() { *m = PythonInfo{} } +func (m *PythonInfo) String() string { return proto.CompactTextString(m) } +func (*PythonInfo) ProtoMessage() {} +func (*PythonInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} } + +func (m *PythonInfo) GetSourceFile() []string { + if m != nil { + return m.SourceFile + } + return nil +} + +func (m *PythonInfo) GetDepFile() []string { + if m != nil { + return m.DepFile + } + return nil +} + +var E_PythonInfo_PythonInfo = &proto.ExtensionDesc{ + ExtendedType: (*ExtraActionInfo)(nil), + ExtensionType: (*PythonInfo)(nil), + Field: 1005, + Name: "blaze.PythonInfo.python_info", + Tag: "bytes,1005,opt,name=python_info,json=pythonInfo", + Filename: "extra_actions_base_proto/extra_actions_base.proto", +} + +func init() { + proto.RegisterType((*ExtraActionSummary)(nil), "blaze.ExtraActionSummary") + proto.RegisterType((*DetailedExtraActionInfo)(nil), "blaze.DetailedExtraActionInfo") + proto.RegisterType((*ExtraActionInfo)(nil), "blaze.ExtraActionInfo") + proto.RegisterType((*ExtraActionInfo_StringList)(nil), "blaze.ExtraActionInfo.StringList") + proto.RegisterType((*ExtraActionInfo_AspectDescriptor)(nil), 
"blaze.ExtraActionInfo.AspectDescriptor") + proto.RegisterType((*ExtraActionInfo_AspectDescriptor_StringList)(nil), "blaze.ExtraActionInfo.AspectDescriptor.StringList") + proto.RegisterType((*EnvironmentVariable)(nil), "blaze.EnvironmentVariable") + proto.RegisterType((*SpawnInfo)(nil), "blaze.SpawnInfo") + proto.RegisterType((*CppCompileInfo)(nil), "blaze.CppCompileInfo") + proto.RegisterType((*CppLinkInfo)(nil), "blaze.CppLinkInfo") + proto.RegisterType((*JavaCompileInfo)(nil), "blaze.JavaCompileInfo") + proto.RegisterType((*PythonInfo)(nil), "blaze.PythonInfo") + proto.RegisterExtension(E_SpawnInfo_SpawnInfo) + proto.RegisterExtension(E_CppCompileInfo_CppCompileInfo) + proto.RegisterExtension(E_CppLinkInfo_CppLinkInfo) + proto.RegisterExtension(E_JavaCompileInfo_JavaCompileInfo) + proto.RegisterExtension(E_PythonInfo_PythonInfo) +} + +func init() { proto.RegisterFile("extra_actions_base_proto/extra_actions_base.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 1038 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xdd, 0x6e, 0x23, 0x35, + 0x14, 0x56, 0xa6, 0x4d, 0x9b, 0x39, 0xd1, 0xa6, 0xa9, 0x4b, 0xd9, 0x6c, 0x80, 0xdd, 0x12, 0x10, + 0x5b, 0x01, 0x9a, 0x15, 0xb9, 0x58, 0x10, 0x08, 0xa1, 0x6e, 0xb7, 0xa8, 0x40, 0x45, 0xab, 0xe9, + 0x0a, 0x21, 0x21, 0x34, 0x72, 0x26, 0x6e, 0xea, 0x76, 0x66, 0x6c, 0x79, 0x9c, 0x94, 0x70, 0xd5, + 0x97, 0xe0, 0x8e, 0x47, 0xe1, 0x25, 0x78, 0x8b, 0x5d, 0x10, 0x37, 0x88, 0x07, 0x40, 0x3e, 0xf6, + 0xcc, 0x34, 0x69, 0xda, 0xed, 0x9d, 0xfd, 0x9d, 0x6f, 0xce, 0xcf, 0xf7, 0xd9, 0x4e, 0xe0, 0x13, + 0xf6, 0x8b, 0x56, 0x34, 0xa2, 0xb1, 0xe6, 0x22, 0xcb, 0xa3, 0x01, 0xcd, 0x59, 0x24, 0x95, 0xd0, + 0xe2, 0xc9, 0xf5, 0x40, 0x80, 0x01, 0x52, 0x1f, 0x24, 0xf4, 0x57, 0xd6, 0x3b, 0x00, 0xb2, 0x67, + 0x28, 0x3b, 0xc8, 0x38, 0x1e, 0xa7, 0x29, 0x55, 0x53, 0xf2, 0x14, 0x56, 0xec, 0x27, 0x9d, 0xda, + 0xd6, 0xd2, 0x76, 0xb3, 0xff, 0x30, 0x40, 0x76, 0xf0, 0x9c, 0x69, 0xca, 0x13, 
0x36, 0xbc, 0xf2, + 0xc9, 0x37, 0xd9, 0x89, 0x08, 0x1d, 0xbb, 0xa7, 0xe0, 0xfe, 0x0d, 0x14, 0xf2, 0x18, 0xd6, 0xb4, + 0xe2, 0xa3, 0x11, 0x53, 0x3c, 0x1b, 0x45, 0x27, 0x3c, 0x61, 0x9d, 0xda, 0x56, 0x6d, 0xdb, 0x0f, + 0x5b, 0x15, 0xfc, 0x35, 0x4f, 0x18, 0x09, 0xca, 0xda, 0xde, 0x96, 0xb7, 0xdd, 0xec, 0xbf, 0xe9, + 0x6a, 0xdf, 0x54, 0xf3, 0xbf, 0x3a, 0xac, 0xcd, 0x17, 0x7b, 0x03, 0xea, 0xe2, 0x22, 0x63, 0xca, + 0x95, 0xb0, 0x1b, 0xf2, 0x1e, 0x34, 0x69, 0x2e, 0x59, 0xac, 0xa3, 0x8c, 0xa6, 0xac, 0xb3, 0x62, + 0x62, 0xcf, 0xbc, 0x4e, 0x2d, 0x04, 0x0b, 0x7f, 0x4f, 0x53, 0x46, 0x7e, 0x86, 0x75, 0x47, 0x92, + 0x54, 0xd1, 0x94, 0x69, 0xa6, 0xf2, 0xce, 0x2a, 0xaa, 0xf0, 0xf1, 0xe2, 0x4e, 0x82, 0x1d, 0xe4, + 0x1f, 0x95, 0xf4, 0xbd, 0x4c, 0xab, 0x29, 0x26, 0x6e, 0xd3, 0xb9, 0x10, 0xd9, 0x81, 0x55, 0x8b, + 0xe5, 0x9d, 0x06, 0x26, 0x7d, 0x7c, 0x6b, 0xd2, 0xe7, 0x2c, 0x8f, 0x15, 0x97, 0x5a, 0xa8, 0xb0, + 0xf8, 0x8e, 0xb4, 0xc0, 0xe3, 0xc3, 0x8e, 0x87, 0x93, 0x79, 0x7c, 0x48, 0xba, 0xd0, 0x48, 0x33, + 0x96, 0x8a, 0x8c, 0xc7, 0x9d, 0x3a, 0xa2, 0xe5, 0xbe, 0x7b, 0x02, 0x9b, 0x0b, 0xbb, 0x23, 0x6d, + 0x58, 0x3a, 0x67, 0x53, 0xa7, 0x8f, 0x59, 0x92, 0x4f, 0xa1, 0x3e, 0xa1, 0xc9, 0x98, 0x61, 0xe6, + 0x66, 0xff, 0xdd, 0x1b, 0xfa, 0x3a, 0xd6, 0xc6, 0xa9, 0x03, 0x9e, 0xeb, 0xd0, 0xf2, 0x3f, 0xf7, + 0x3e, 0xab, 0x75, 0x3f, 0x00, 0xa8, 0x02, 0x46, 0x7e, 0x9b, 0xca, 0x9c, 0x1e, 0xbf, 0xe4, 0x75, + 0x6a, 0xdd, 0x3f, 0x3c, 0x68, 0xcf, 0x4f, 0x46, 0x1e, 0xcd, 0xfa, 0x62, 0x7b, 0xba, 0xea, 0xc9, + 0xd9, 0x22, 0x4f, 0x3c, 0x94, 0xef, 0xcb, 0x3b, 0xca, 0xb7, 0xd8, 0xa4, 0xeb, 0x06, 0x75, 0x2f, + 0xee, 0xae, 0xd8, 0xfe, 0xac, 0x62, 0xfd, 0xbb, 0xb6, 0xb2, 0x58, 0xc2, 0xde, 0xeb, 0x25, 0xfc, + 0xd0, 0x6f, 0xbc, 0x5c, 0x6d, 0x5f, 0x5e, 0x5e, 0x5e, 0x7a, 0xbd, 0xaf, 0x60, 0x63, 0x2f, 0x9b, + 0x70, 0x25, 0xb2, 0x94, 0x65, 0xfa, 0x07, 0xaa, 0x38, 0x1d, 0x24, 0x8c, 0x10, 0x58, 0x76, 0x22, + 0x7a, 0xdb, 0x7e, 0x88, 0xeb, 0x2a, 0x97, 0x87, 0xa0, 0xdd, 0xf4, 0x5e, 0xd5, 0xc0, 0x3f, 0x96, + 0xf4, 0xc2, 0xde, 
0x98, 0x2e, 0x34, 0xa8, 0x1a, 0x8d, 0x4d, 0x2e, 0x57, 0xb2, 0xdc, 0x93, 0xa7, + 0xd0, 0x98, 0xb8, 0xfc, 0x4e, 0xf5, 0x6e, 0x31, 0xea, 0xf5, 0x0e, 0xc2, 0x92, 0x4b, 0xde, 0x01, + 0xe0, 0x99, 0x1c, 0x6b, 0x7b, 0xdb, 0x97, 0x31, 0xab, 0x8f, 0x08, 0x5e, 0xf4, 0x47, 0xd0, 0x14, + 0x63, 0x5d, 0xc6, 0xeb, 0x18, 0x07, 0x0b, 0x19, 0x42, 0x7f, 0x1f, 0x20, 0x37, 0x0d, 0x46, 0xdc, + 0x74, 0x78, 0xc3, 0x3b, 0xd0, 0xf9, 0x7b, 0x15, 0xd5, 0x6f, 0xbb, 0x70, 0x39, 0x52, 0xe8, 0xe7, + 0xc5, 0xb2, 0xf7, 0xa7, 0x07, 0xad, 0x5d, 0x29, 0x77, 0x45, 0x2a, 0x79, 0xc2, 0x70, 0x60, 0x02, + 0xcb, 0x5a, 0x88, 0xc4, 0xf9, 0x89, 0x6b, 0xf3, 0x46, 0xc5, 0x96, 0xa2, 0x22, 0x21, 0xdd, 0x1b, + 0x64, 0xba, 0x6a, 0x15, 0xf0, 0x21, 0xa2, 0xa6, 0xf5, 0x5c, 0x8c, 0x55, 0xcc, 0x6c, 0xeb, 0x4b, + 0xf6, 0xc4, 0x5a, 0x68, 0xd1, 0x6c, 0xcb, 0x96, 0x50, 0xcd, 0x46, 0x02, 0xd8, 0xb0, 0xf4, 0x3c, + 0xa2, 0xd9, 0x30, 0x3a, 0x65, 0x74, 0x68, 0x0e, 0xb5, 0x15, 0x61, 0xdd, 0x85, 0x76, 0xb2, 0xe1, + 0xbe, 0x0d, 0xcc, 0x78, 0xb0, 0x72, 0x77, 0x0f, 0xfa, 0x3f, 0x42, 0x3b, 0x96, 0x32, 0x72, 0xfd, + 0xdf, 0xae, 0xe4, 0x2b, 0xab, 0xe4, 0xa6, 0x0b, 0xcf, 0x0a, 0x16, 0xb6, 0xe2, 0x99, 0x7d, 0xef, + 0xf7, 0x25, 0x68, 0xee, 0x4a, 0x79, 0xc0, 0xb3, 0x73, 0x14, 0x74, 0xd6, 0xed, 0xda, 0x6b, 0xdc, + 0xf6, 0xae, 0x29, 0xd2, 0x87, 0x4d, 0x9e, 0x69, 0xa6, 0x4e, 0x68, 0xcc, 0xa2, 0xab, 0x54, 0xab, + 0xee, 0x46, 0x19, 0x3c, 0xac, 0xbe, 0xd9, 0x86, 0x76, 0xc2, 0xb3, 0xf3, 0x48, 0x53, 0x35, 0x62, + 0x3a, 0xd2, 0x53, 0x59, 0x68, 0xdd, 0x32, 0xf8, 0x0b, 0x84, 0x5f, 0x4c, 0x25, 0x33, 0xd6, 0x22, + 0x33, 0xd7, 0x54, 0xf3, 0x38, 0x63, 0x79, 0xee, 0xde, 0x4a, 0x24, 0x1e, 0x97, 0xa8, 0x19, 0xa3, + 0x20, 0xa6, 0x12, 0xa5, 0xf6, 0x43, 0xdf, 0x71, 0x52, 0x49, 0xbe, 0x80, 0xee, 0x60, 0xcc, 0x93, + 0x21, 0x2a, 0xe9, 0x6c, 0x8b, 0xa8, 0xd2, 0xfc, 0x84, 0xc6, 0x1a, 0x7f, 0x27, 0xfc, 0xf0, 0x3e, + 0x32, 0x8c, 0x28, 0xd6, 0xbd, 0x1d, 0x17, 0x26, 0x0f, 0xa0, 0x81, 0xb9, 0x85, 0xd4, 0xf8, 0xfa, + 0xfb, 0xe1, 0xaa, 0xd9, 0x1f, 0x4a, 0xdd, 0x3f, 0x84, 
0x7b, 0xc6, 0x27, 0x0c, 0xdf, 0x6a, 0xd2, + 0x5f, 0xd6, 0x24, 0x52, 0x99, 0x54, 0x38, 0x10, 0x36, 0xe3, 0x6a, 0xd3, 0xfb, 0xd7, 0x83, 0xb5, + 0x6f, 0xe9, 0x84, 0x5e, 0x3d, 0xf3, 0x6f, 0x83, 0x6f, 0x85, 0x3d, 0xa3, 0xc5, 0x4f, 0x63, 0x05, + 0x98, 0x68, 0x9c, 0xd0, 0x3c, 0x97, 0x54, 0x9f, 0xba, 0x73, 0x5f, 0x01, 0xe4, 0x21, 0xb8, 0xf3, + 0x8d, 0xe1, 0x25, 0x7b, 0x59, 0x2b, 0x64, 0xfe, 0x4a, 0x2c, 0x5f, 0x25, 0xa0, 0x57, 0x6f, 0x81, + 0x7f, 0x46, 0x27, 0x34, 0xc6, 0xe9, 0xed, 0x39, 0x6f, 0x20, 0x70, 0x28, 0xb5, 0xa9, 0x2d, 0x95, + 0x88, 0x59, 0x9e, 0x0b, 0x55, 0x88, 0x5e, 0x02, 0xe4, 0x7d, 0xb8, 0x57, 0x6e, 0xb0, 0xbc, 0xd5, + 0x79, 0x16, 0x34, 0xac, 0x81, 0x10, 0xba, 0x9a, 0xc1, 0x4a, 0x3c, 0x0b, 0xf6, 0x7f, 0x82, 0x75, + 0x53, 0xf5, 0x6e, 0x37, 0xe2, 0xa5, 0x15, 0xbb, 0x08, 0xcf, 0xe9, 0x19, 0xae, 0x9d, 0xcd, 0x02, + 0xbd, 0xdf, 0x6a, 0x00, 0x47, 0x53, 0x7d, 0xea, 0xfe, 0x86, 0xcc, 0x69, 0x52, 0xbb, 0xa6, 0xc9, + 0x03, 0x68, 0x0c, 0x99, 0x2c, 0x6e, 0x04, 0x1e, 0x88, 0x21, 0x93, 0xf8, 0xf8, 0x7d, 0x07, 0x4d, + 0x89, 0x99, 0x6e, 0xef, 0xf0, 0x1f, 0xdb, 0xe1, 0xba, 0x0b, 0x57, 0xc5, 0x43, 0x90, 0xe5, 0xfa, + 0xd9, 0x13, 0xf8, 0x28, 0x16, 0x69, 0x30, 0x12, 0x62, 0x94, 0xb0, 0x60, 0xc8, 0x26, 0xe6, 0xb9, + 0xcb, 0x03, 0x3c, 0xa7, 0x41, 0xc2, 0x07, 0x81, 0xfb, 0x83, 0x18, 0xe0, 0xdf, 0xc5, 0xa3, 0xda, + 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x81, 0x96, 0x76, 0xca, 0x51, 0x0a, 0x00, 0x00, +} diff --git a/vendor/github.com/bazelbuild/buildtools/file/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/file/BUILD.bazel new file mode 100644 index 00000000000..dddf9af4bee --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/file/BUILD.bazel @@ -0,0 +1,7 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = ["file.go"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/file/file.go 
b/vendor/github.com/bazelbuild/buildtools/file/file.go new file mode 100644 index 00000000000..a311099b184 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/file/file.go @@ -0,0 +1,48 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Package file provides utility file operations. +package file + +import ( + "fmt" + "io" + "io/ioutil" + "os" +) + +// ReadFile is like ioutil.ReadFile. +func ReadFile(name string) ([]byte, os.FileInfo, error) { + fi, err := os.Stat(name) + if err != nil { + return nil, nil, err + } + + data, err := ioutil.ReadFile(name) + return data, fi, err +} + +// WriteFile is like ioutil.WriteFile +func WriteFile(name string, data []byte) error { + return ioutil.WriteFile(name, data, 0644) +} + +// OpenReadFile is like os.Open. 
+func OpenReadFile(name string) io.ReadCloser { + f, err := os.Open(name) + if err != nil { + fmt.Fprintf(os.Stderr, "Could not open %s\n", name) + os.Exit(1) + } + return f +} diff --git a/vendor/github.com/bazelbuild/buildtools/generatetables/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/generatetables/BUILD.bazel new file mode 100644 index 00000000000..88aa8e7744e --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/generatetables/BUILD.bazel @@ -0,0 +1,17 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = ["generate_tables.go"], + visibility = ["//visibility:private"], + deps = [ + "//build_proto:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + ], +) + +go_binary( + name = "generatetables", + library = ":go_default_library", + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/generatetables/generate_tables.go b/vendor/github.com/bazelbuild/buildtools/generatetables/generate_tables.go new file mode 100644 index 00000000000..68f2ccd73df --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/generatetables/generate_tables.go @@ -0,0 +1,107 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// generateTables is a tool that generates a go file from the Build language proto file. 
+// It generates a Go map to find the type of an attribute. + +package main + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "sort" + + buildpb "github.com/bazelbuild/buildtools/build_proto" + "github.com/golang/protobuf/proto" +) + +// bazelBuildLanguage reads a proto file and returns a BuildLanguage object. +func bazelBuildLanguage(file string) (*buildpb.BuildLanguage, error) { + data, err := ioutil.ReadFile(file) + if err != nil { + fmt.Fprintf(os.Stderr, "Cannot read %s: %s\n", file, err) + return nil, err + } + + lang := &buildpb.BuildLanguage{} + if err := proto.Unmarshal(data, lang); err != nil { + return nil, err + } + return lang, nil +} + +// generateTable returns a map that associate a type for each attribute name found in Bazel. +func generateTable(rules []*buildpb.RuleDefinition) map[string]buildpb.Attribute_Discriminator { + types := make(map[string]buildpb.Attribute_Discriminator) + for _, r := range rules { + for _, attr := range r.Attribute { + types[*attr.Name] = *attr.Type + } + } + + // Because of inconsistencies in bazel, we need a few exceptions. + types["resources"] = buildpb.Attribute_LABEL_LIST + types["out"] = buildpb.Attribute_STRING + types["outs"] = buildpb.Attribute_STRING_LIST + types["stamp"] = buildpb.Attribute_TRISTATE + types["strip"] = buildpb.Attribute_BOOLEAN + + // Surprisingly, the name argument is missing. 
+ types["name"] = buildpb.Attribute_STRING + + // package arguments are also not listed in the proto file + types["default_hdrs_check"] = buildpb.Attribute_STRING + types["default_visibility"] = types["visibility"] + types["default_copts"] = types["copts"] + types["default_deprecation"] = types["deprecation"] + types["default_testonly"] = types["testonly"] + types["features"] = buildpb.Attribute_STRING_LIST + + types["extra_srcs"] = types["srcs"] + types["pytype_deps"] = types["deps"] + + return types +} + +func main() { + if len(os.Args) != 2 { + log.Fatal("Expected argument: proto file\n") + } + lang, err := bazelBuildLanguage(os.Args[1]) + if err != nil { + log.Fatalf("%s\n", err) + } + types := generateTable(lang.Rule) + + // sort the keys to get deterministic output + keys := make([]string, 0, len(types)) + for i := range types { + keys = append(keys, i) + } + sort.Strings(keys) + + // print + fmt.Printf(`// Generated file, do not edit. +package lang + +import buildpb "github.com/bazelbuild/buildtools/build_proto" + +var TypeOf = map[string]buildpb.Attribute_Discriminator{ +`) + for _, attr := range keys { + fmt.Printf(" \"%s\": buildpb.Attribute_%s,\n", attr, types[attr]) + } + fmt.Printf("}\n") +} diff --git a/vendor/github.com/bazelbuild/buildtools/lang/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/lang/BUILD.bazel new file mode 100644 index 00000000000..04bd2d88467 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/lang/BUILD.bazel @@ -0,0 +1,28 @@ +# gazelle:ignore + +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("//build:build_defs.bzl", "genfile_check_test") + +genfile_check_test( + src = "tables.gen.go", + gen = "tables.go", +) + +genrule( + name = "generateTablesFile", + # generated by: bazel info build-language > build-language.pb + # Unable to get this info from within a single bazel. 
+ srcs = ["build-language.pb"], + outs = ["tables.go"], + cmd = "$(location //generatetables) $(SRCS) > $@", + tools = ["//generatetables"], +) + +go_library( + name = "go_default_library", + srcs = [ + "tables.go", # keep + ], + visibility = ["//visibility:public"], + deps = ["//build_proto:go_default_library"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/lang/build-language.pb b/vendor/github.com/bazelbuild/buildtools/lang/build-language.pb new file mode 100644 index 00000000000..d046e8187c6 Binary files /dev/null and b/vendor/github.com/bazelbuild/buildtools/lang/build-language.pb differ diff --git a/vendor/github.com/bazelbuild/buildtools/lang/tables.gen.go b/vendor/github.com/bazelbuild/buildtools/lang/tables.gen.go new file mode 100755 index 00000000000..f91ef181cd3 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/lang/tables.gen.go @@ -0,0 +1,245 @@ +// Generated file, do not edit. +package lang + +import buildpb "github.com/bazelbuild/buildtools/build_proto" + +var TypeOf = map[string]buildpb.Attribute_Discriminator{ + "aar": buildpb.Attribute_LABEL, + "actual": buildpb.Attribute_LABEL, + "aliases": buildpb.Attribute_STRING_LIST, + "all_files": buildpb.Attribute_LABEL, + "alwayslink": buildpb.Attribute_BOOLEAN, + "app_asset_catalogs": buildpb.Attribute_LABEL_LIST, + "app_bundle_id": buildpb.Attribute_STRING, + "app_deps": buildpb.Attribute_LABEL_LIST, + "app_entitlements": buildpb.Attribute_LABEL, + "app_icon": buildpb.Attribute_STRING, + "app_infoplists": buildpb.Attribute_LABEL_LIST, + "app_name": buildpb.Attribute_STRING, + "app_provisioning_profile": buildpb.Attribute_LABEL, + "app_resources": buildpb.Attribute_LABEL_LIST, + "app_storyboards": buildpb.Attribute_LABEL_LIST, + "app_strings": buildpb.Attribute_LABEL_LIST, + "app_structured_resources": buildpb.Attribute_LABEL_LIST, + "archives": buildpb.Attribute_LABEL_LIST, + "args": buildpb.Attribute_STRING_LIST, + "artifact": 
buildpb.Attribute_STRING, + "asset_catalogs": buildpb.Attribute_LABEL_LIST, + "assets": buildpb.Attribute_LABEL_LIST, + "assets_dir": buildpb.Attribute_STRING, + "avoid_deps": buildpb.Attribute_LABEL_LIST, + "binary": buildpb.Attribute_LABEL, + "binary_type": buildpb.Attribute_STRING, + "blacklisted_protos": buildpb.Attribute_LABEL_LIST, + "bootclasspath": buildpb.Attribute_LABEL_LIST, + "build_file": buildpb.Attribute_STRING, + "build_file_content": buildpb.Attribute_STRING, + "bundle_id": buildpb.Attribute_STRING, + "bundle_imports": buildpb.Attribute_LABEL_LIST, + "bundle_loader": buildpb.Attribute_LABEL, + "bundles": buildpb.Attribute_LABEL_LIST, + "classpath_resources": buildpb.Attribute_LABEL_LIST, + "cmd": buildpb.Attribute_STRING, + "command_line": buildpb.Attribute_STRING, + "commit": buildpb.Attribute_STRING, + "compatible_with": buildpb.Attribute_LABEL_LIST, + "compiler_files": buildpb.Attribute_LABEL, + "constraints": buildpb.Attribute_STRING_LIST, + "copts": buildpb.Attribute_STRING_LIST, + "cpu": buildpb.Attribute_STRING, + "create_executable": buildpb.Attribute_BOOLEAN, + "crunch_png": buildpb.Attribute_BOOLEAN, + "custom_package": buildpb.Attribute_STRING, + "data": buildpb.Attribute_LABEL_LIST, + "datamodels": buildpb.Attribute_LABEL_LIST, + "default": buildpb.Attribute_LABEL, + "default_copts": buildpb.Attribute_STRING_LIST, + "default_deprecation": buildpb.Attribute_STRING, + "default_hdrs_check": buildpb.Attribute_STRING, + "default_ios_sdk_version": buildpb.Attribute_STRING, + "default_macosx_sdk_version": buildpb.Attribute_STRING, + "default_python_version": buildpb.Attribute_STRING, + "default_testonly": buildpb.Attribute_BOOLEAN, + "default_tvos_sdk_version": buildpb.Attribute_STRING, + "default_visibility": buildpb.Attribute_STRING_LIST, + "default_watchos_sdk_version": buildpb.Attribute_STRING, + "defines": buildpb.Attribute_STRING_LIST, + "densities": buildpb.Attribute_STRING_LIST, + "deploy_manifest_lines": buildpb.Attribute_STRING_LIST, 
+ "deprecation": buildpb.Attribute_STRING, + "deps": buildpb.Attribute_LABEL_LIST, + "dex_shards": buildpb.Attribute_INTEGER, + "dexopts": buildpb.Attribute_STRING_LIST, + "distribs": buildpb.Attribute_DISTRIBUTION_SET, + "dwp_files": buildpb.Attribute_LABEL, + "dylibs": buildpb.Attribute_LABEL_LIST, + "dynamic_runtime_libs": buildpb.Attribute_LABEL_LIST, + "enable_modules": buildpb.Attribute_BOOLEAN, + "encoding": buildpb.Attribute_STRING, + "entitlements": buildpb.Attribute_LABEL, + "entry_classes": buildpb.Attribute_STRING_LIST, + "executable": buildpb.Attribute_BOOLEAN, + "exported_plugins": buildpb.Attribute_LABEL_LIST, + "exports": buildpb.Attribute_LABEL_LIST, + "exports_manifest": buildpb.Attribute_BOOLEAN, + "expression": buildpb.Attribute_STRING, + "ext_bundle_id": buildpb.Attribute_STRING, + "ext_entitlements": buildpb.Attribute_LABEL, + "ext_families": buildpb.Attribute_STRING_LIST, + "ext_infoplists": buildpb.Attribute_LABEL_LIST, + "ext_provisioning_profile": buildpb.Attribute_LABEL, + "ext_resources": buildpb.Attribute_LABEL_LIST, + "ext_strings": buildpb.Attribute_LABEL_LIST, + "ext_structured_resources": buildpb.Attribute_LABEL_LIST, + "extclasspath": buildpb.Attribute_LABEL_LIST, + "extensions": buildpb.Attribute_LABEL_LIST, + "extra_actions": buildpb.Attribute_LABEL_LIST, + "extra_srcs": buildpb.Attribute_LABEL_LIST, + "families": buildpb.Attribute_STRING_LIST, + "features": buildpb.Attribute_STRING_LIST, + "flaky": buildpb.Attribute_BOOLEAN, + "framework_imports": buildpb.Attribute_LABEL_LIST, + "genclass": buildpb.Attribute_LABEL_LIST, + "generates_api": buildpb.Attribute_BOOLEAN, + "hdrs": buildpb.Attribute_LABEL_LIST, + "header_compiler": buildpb.Attribute_LABEL_LIST, + "heuristic_label_expansion": buildpb.Attribute_BOOLEAN, + "idl_import_root": buildpb.Attribute_STRING, + "idl_parcelables": buildpb.Attribute_LABEL_LIST, + "idl_srcs": buildpb.Attribute_LABEL_LIST, + "ijar": buildpb.Attribute_LABEL_LIST, + "imports": 
buildpb.Attribute_STRING_LIST, + "includes": buildpb.Attribute_STRING_LIST, + "incremental_dexing": buildpb.Attribute_TRISTATE, + "infoplist": buildpb.Attribute_LABEL, + "infoplists": buildpb.Attribute_LABEL_LIST, + "init_submodules": buildpb.Attribute_BOOLEAN, + "ios_device_arg": buildpb.Attribute_STRING_LIST, + "ios_test_target_device": buildpb.Attribute_LABEL, + "ios_version": buildpb.Attribute_STRING, + "ipa_post_processor": buildpb.Attribute_LABEL, + "is_dynamic": buildpb.Attribute_BOOLEAN, + "jars": buildpb.Attribute_LABEL_LIST, + "javabuilder": buildpb.Attribute_LABEL_LIST, + "javac": buildpb.Attribute_LABEL_LIST, + "javac_supports_workers": buildpb.Attribute_BOOLEAN, + "javacopts": buildpb.Attribute_STRING_LIST, + "jre_deps": buildpb.Attribute_LABEL_LIST, + "jvm_flags": buildpb.Attribute_STRING_LIST, + "jvm_opts": buildpb.Attribute_STRING_LIST, + "launch_image": buildpb.Attribute_STRING, + "launch_storyboard": buildpb.Attribute_LABEL, + "launcher": buildpb.Attribute_LABEL, + "licenses": buildpb.Attribute_LICENSE, + "linker_files": buildpb.Attribute_LABEL, + "linkopts": buildpb.Attribute_STRING_LIST, + "linkshared": buildpb.Attribute_BOOLEAN, + "linkstamp": buildpb.Attribute_LABEL, + "linkstatic": buildpb.Attribute_BOOLEAN, + "local": buildpb.Attribute_BOOLEAN, + "main": buildpb.Attribute_LABEL, + "main_class": buildpb.Attribute_STRING, + "main_dex_list": buildpb.Attribute_LABEL, + "main_dex_list_opts": buildpb.Attribute_STRING_LIST, + "main_dex_proguard_specs": buildpb.Attribute_LABEL_LIST, + "malloc": buildpb.Attribute_LABEL, + "manifest": buildpb.Attribute_LABEL, + "manifest_merger": buildpb.Attribute_STRING, + "manifest_values": buildpb.Attribute_STRING_DICT, + "message": buildpb.Attribute_STRING, + "misc": buildpb.Attribute_STRING_LIST, + "mnemonics": buildpb.Attribute_STRING_LIST, + "module_map": buildpb.Attribute_LABEL, + "multidex": buildpb.Attribute_STRING, + "name": buildpb.Attribute_STRING, + "neverlink": buildpb.Attribute_BOOLEAN, + 
"nocompress_extensions": buildpb.Attribute_STRING_LIST, + "nocopts": buildpb.Attribute_STRING, + "non_arc_srcs": buildpb.Attribute_LABEL_LIST, + "non_propagated_deps": buildpb.Attribute_LABEL_LIST, + "objcopy_files": buildpb.Attribute_LABEL, + "options_file": buildpb.Attribute_LABEL, + "opts": buildpb.Attribute_STRING_LIST, + "out": buildpb.Attribute_STRING, + "out_templates": buildpb.Attribute_STRING_LIST, + "output_group": buildpb.Attribute_STRING, + "output_licenses": buildpb.Attribute_LICENSE, + "output_to_bindir": buildpb.Attribute_BOOLEAN, + "outs": buildpb.Attribute_STRING_LIST, + "path": buildpb.Attribute_STRING, + "pch": buildpb.Attribute_LABEL, + "per_proto_includes": buildpb.Attribute_BOOLEAN, + "platform_type": buildpb.Attribute_STRING, + "plugin": buildpb.Attribute_LABEL, + "plugins": buildpb.Attribute_LABEL_LIST, + "portable_proto_filters": buildpb.Attribute_LABEL_LIST, + "prefix": buildpb.Attribute_STRING, + "processor_class": buildpb.Attribute_STRING, + "proguard_apply_mapping": buildpb.Attribute_LABEL, + "proguard_generate_mapping": buildpb.Attribute_BOOLEAN, + "proguard_specs": buildpb.Attribute_LABEL_LIST, + "provisioning_profile": buildpb.Attribute_LABEL, + "pytype_deps": buildpb.Attribute_LABEL_LIST, + "remote": buildpb.Attribute_STRING, + "repository": buildpb.Attribute_STRING, + "require_defined_version": buildpb.Attribute_BOOLEAN, + "requires_action_output": buildpb.Attribute_BOOLEAN, + "resource_configuration_filters": buildpb.Attribute_STRING_LIST, + "resource_files": buildpb.Attribute_LABEL_LIST, + "resource_strip_prefix": buildpb.Attribute_STRING, + "resources": buildpb.Attribute_LABEL_LIST, + "restricted_to": buildpb.Attribute_LABEL_LIST, + "runtime": buildpb.Attribute_LABEL, + "runtime_deps": buildpb.Attribute_LABEL_LIST, + "scope": buildpb.Attribute_LABEL_LIST, + "sdk_dylibs": buildpb.Attribute_STRING_LIST, + "sdk_frameworks": buildpb.Attribute_STRING_LIST, + "sdk_includes": buildpb.Attribute_STRING_LIST, + "server": 
buildpb.Attribute_STRING, + "settings_file": buildpb.Attribute_STRING, + "sha1": buildpb.Attribute_STRING, + "sha256": buildpb.Attribute_STRING, + "shard_count": buildpb.Attribute_INTEGER, + "singlejar": buildpb.Attribute_LABEL_LIST, + "size": buildpb.Attribute_STRING, + "source_version": buildpb.Attribute_STRING, + "srcjar": buildpb.Attribute_LABEL, + "srcs": buildpb.Attribute_LABEL_LIST, + "srcs_version": buildpb.Attribute_STRING, + "stamp": buildpb.Attribute_TRISTATE, + "static_runtime_libs": buildpb.Attribute_LABEL_LIST, + "storyboards": buildpb.Attribute_LABEL_LIST, + "strict": buildpb.Attribute_BOOLEAN, + "strings": buildpb.Attribute_LABEL_LIST, + "strip": buildpb.Attribute_BOOLEAN, + "strip_files": buildpb.Attribute_LABEL, + "strip_prefix": buildpb.Attribute_STRING, + "structured_resources": buildpb.Attribute_LABEL_LIST, + "supports_header_parsing": buildpb.Attribute_BOOLEAN, + "supports_param_files": buildpb.Attribute_BOOLEAN, + "tag": buildpb.Attribute_STRING, + "tags": buildpb.Attribute_STRING_LIST, + "target_device": buildpb.Attribute_LABEL, + "target_version": buildpb.Attribute_STRING, + "test_class": buildpb.Attribute_STRING, + "testonly": buildpb.Attribute_BOOLEAN, + "tests": buildpb.Attribute_LABEL_LIST, + "textual_hdrs": buildpb.Attribute_LABEL_LIST, + "timeout": buildpb.Attribute_STRING, + "toolchains": buildpb.Attribute_LABEL_LIST, + "tools": buildpb.Attribute_LABEL_LIST, + "type": buildpb.Attribute_STRING, + "url": buildpb.Attribute_STRING, + "use_objc_header_names": buildpb.Attribute_BOOLEAN, + "use_testrunner": buildpb.Attribute_BOOLEAN, + "values": buildpb.Attribute_STRING_DICT, + "version": buildpb.Attribute_STRING, + "versions": buildpb.Attribute_LABEL_LIST, + "visibility": buildpb.Attribute_STRING_LIST, + "weak_sdk_frameworks": buildpb.Attribute_STRING_LIST, + "xcode": buildpb.Attribute_LABEL, + "xctest": buildpb.Attribute_BOOLEAN, + "xctest_app": buildpb.Attribute_LABEL, + "xibs": buildpb.Attribute_LABEL_LIST, + "xlint": 
buildpb.Attribute_STRING_LIST, +} diff --git a/vendor/github.com/bazelbuild/buildtools/status.sh b/vendor/github.com/bazelbuild/buildtools/status.sh new file mode 100755 index 00000000000..b3e8f58aa6d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/status.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -e + +buildifier_tags=$(git describe --tags) +IFS='-' read -a parse_tags <<< "$buildifier_tags" +echo "buildifierVersion ${parse_tags[0]}" + +buildifier_rev=$(git rev-parse HEAD) +echo "buildScmRevision ${buildifier_rev}" diff --git a/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel new file mode 100644 index 00000000000..b34fbe7ea2d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel @@ -0,0 +1,20 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = [ + "jsonparser.go", + "tables.go", + ], + visibility = ["//visibility:public"], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = [ + "jsonparser_test.go", + ], + data = glob(["testdata/*"]), + library = ":go_default_library", +) diff --git a/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go b/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go new file mode 100644 index 00000000000..ca2bc4443af --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go @@ -0,0 +1,63 @@ +/* +Copyright 2017 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package tables + +import ( + "encoding/json" + "io/ioutil" +) + +type Definitions struct { + IsLabelArg map[string]bool + LabelBlacklist map[string]bool + IsListArg map[string]bool + IsSortableListArg map[string]bool + SortableBlacklist map[string]bool + SortableWhitelist map[string]bool + NamePriority map[string]int + StripLabelLeadingSlashes bool + ShortenAbsoluteLabelsToRelative bool +} + +// ParseJSONDefinitions reads and parses JSON table definitions from file. +func ParseJSONDefinitions(file string) (Definitions, error) { + var definitions Definitions + + data, err := ioutil.ReadFile(file) + if err != nil { + return definitions, err + } + + err = json.Unmarshal(data, &definitions) + return definitions, err +} + +// ParseAndUpdateJSONDefinitions reads definitions from file and merges or +// overrides the values in memory. 
+func ParseAndUpdateJSONDefinitions(file string, merge bool) error { + definitions, err := ParseJSONDefinitions(file) + if err != nil { + return err + } + + if merge { + MergeTables(definitions.IsLabelArg, definitions.LabelBlacklist, definitions.IsListArg, definitions.IsSortableListArg, definitions.SortableBlacklist, definitions.SortableWhitelist, definitions.NamePriority, definitions.StripLabelLeadingSlashes, definitions.ShortenAbsoluteLabelsToRelative) + } else { + OverrideTables(definitions.IsLabelArg, definitions.LabelBlacklist, definitions.IsListArg, definitions.IsSortableListArg, definitions.SortableBlacklist, definitions.SortableWhitelist, definitions.NamePriority, definitions.StripLabelLeadingSlashes, definitions.ShortenAbsoluteLabelsToRelative) + } + return nil +} diff --git a/vendor/github.com/bazelbuild/buildtools/tables/jsonparser_test.go b/vendor/github.com/bazelbuild/buildtools/tables/jsonparser_test.go new file mode 100644 index 00000000000..db65d06a12f --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/tables/jsonparser_test.go @@ -0,0 +1,44 @@ +/* +Copyright 2017 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package tables + +import ( + "os" + "reflect" + "testing" +) + +func TestParseJSONDefinitions(t *testing.T) { + testdata := os.Getenv("TEST_SRCDIR") + "/" + os.Getenv("TEST_WORKSPACE") + "/tables/testdata" + definitions, err := ParseJSONDefinitions(testdata + "/simple_tables.json") + if err != nil { + t.Error(err) + } + + expected := Definitions{ + IsLabelArg: map[string]bool{"srcs": true}, + LabelBlacklist: map[string]bool{}, + IsSortableListArg: map[string]bool{"srcs": true, "visibility": true}, + SortableBlacklist: map[string]bool{"genrule.srcs": true}, + SortableWhitelist: map[string]bool{}, + NamePriority: map[string]int{"name": -1}, + StripLabelLeadingSlashes: true, + } + if !reflect.DeepEqual(expected, definitions) { + t.Errorf("ParseJSONDefinitions() = %v; want %v", definitions, expected) + } +} diff --git a/vendor/github.com/bazelbuild/buildtools/tables/tables.go b/vendor/github.com/bazelbuild/buildtools/tables/tables.go new file mode 100644 index 00000000000..2de9458e1e1 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/tables/tables.go @@ -0,0 +1,249 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +// Tables about what Buildifier can and cannot edit. +// Perhaps eventually this will be +// derived from the BUILD encyclopedia. 
+ +package tables + +// IsLabelArg: a named argument to a rule call is considered to have a value +// that can be treated as a label or list of labels if the name +// is one of these names. There is a separate blacklist for +// rule-specific exceptions. +var IsLabelArg = map[string]bool{ + "app_target": true, + "appdir": true, + "base_package": true, + "build_deps": true, + "cc_deps": true, + "ccdeps": true, + "common_deps": true, + "compile_deps": true, + "compiler": true, + "data": true, + "default_visibility": true, + "dep": true, + "deps": true, + "deps_java": true, + "dont_depend_on": true, + "env_deps": true, + "envscripts": true, + "exported_deps": true, + "exports": true, + "externs_list": true, + "files": true, + "globals": true, + "implementation": true, + "implements": true, + "includes": true, + "interface": true, + "jar": true, + "jars": true, + "javadeps": true, + "lib_deps": true, + "library": true, + "malloc": true, + "model": true, + "mods": true, + "module_deps": true, + "module_target": true, + "of": true, + "plugins": true, + "proto_deps": true, + "proto_target": true, + "protos": true, + "resource": true, + "resources": true, + "runtime_deps": true, + "scope": true, + "shared_deps": true, + "similar_deps": true, + "source_jar": true, + "src": true, + "srcs": true, + "stripped_targets": true, + "suites": true, + "swigdeps": true, + "target": true, + "target_devices": true, + "target_platforms": true, + "template": true, + "test": true, + "tests": true, + "tests_deps": true, + "tool": true, + "tools": true, + "visibility": true, +} + +// LabelBlacklist is the list of call arguments that cannot be +// shortened, because they are not interpreted using the same +// rules as for other labels. +var LabelBlacklist = map[string]bool{ + // Shortening this can cause visibility checks to fail. + "package_group.includes": true, +} + +// By default, edit.types.IsList consults lang.TypeOf to determine if an arg is a list. 
+// You may override this using IsListArg. Specifying a name here overrides any value +// in lang.TypeOf. +var IsListArg = map[string]bool{ +} + +// IsSortableListArg: a named argument to a rule call is considered to be a sortable list +// if the name is one of these names. There is a separate blacklist for +// rule-specific exceptions. +var IsSortableListArg = map[string]bool{ + "cc_deps": true, + "common_deps": true, + "compile_deps": true, + "configs": true, + "constraints": true, + "data": true, + "default_visibility": true, + "deps": true, + "deps_java": true, + "exported_deps": true, + "exports": true, + "filegroups": true, + "files": true, + "hdrs": true, + "imports": true, + "includes": true, + "inherits": true, + "javadeps": true, + "lib_deps": true, + "module_deps": true, + "out": true, + "outs": true, + "packages": true, + "plugin_modules": true, + "proto_deps": true, + "protos": true, + "pubs": true, + "resources": true, + "runtime_deps": true, + "shared_deps": true, + "similar_deps": true, + "srcs": true, + "swigdeps": true, + "swig_includes": true, + "tags": true, + "tests": true, + "tools": true, + "to_start_extensions": true, + "visibility": true, +} + +// SortableBlacklist records specific rule arguments that must not be reordered. +var SortableBlacklist = map[string]bool{ + "genrule.outs": true, + "genrule.srcs": true, +} + +// SortableWhitelist records specific rule arguments that are guaranteed +// to be reorderable, because bazel re-sorts the list itself after reading the BUILD file. +var SortableWhitelist = map[string]bool{ + "cc_inc_library.hdrs": true, + "cc_library.hdrs": true, + "java_library.srcs": true, + "java_library.resources": true, + "java_binary.srcs": true, + "java_binary.resources": true, + "java_test.srcs": true, + "java_test.resources": true, + "java_library.constraints": true, + "java_import.constraints": true, +} + +// NamePriority maps an argument name to its sorting priority. 
+// +// NOTE(bazel-team): These are the old buildifier rules. It is likely that this table +// will change, perhaps swapping in a separate table for each call, +// derived from the order used in the Build Encyclopedia. +var NamePriority = map[string]int{ + "name": -99, + "gwt_name": -98, + "package_name": -97, + "visible_node_name": -96, // for boq_initial_css_modules and boq_jswire_test_suite + "size": -95, + "timeout": -94, + "testonly": -93, + "src": -92, + "srcdir": -91, + "srcs": -90, + "out": -89, + "outs": -88, + "hdrs": -87, + "has_services": -86, // before api versions, for proto + "include": -85, // before exclude, for glob + "of": -84, // for check_dependencies + "baseline": -83, // for searchbox_library + // All others sort here, at 0. + "destdir": 1, + "exports": 2, + "runtime_deps": 3, + "deps": 4, + "implementation": 5, + "implements": 6, + "alwayslink": 7, +} + +var StripLabelLeadingSlashes = false + +var ShortenAbsoluteLabelsToRelative = false + +var FormatBzlFiles = false + +// OverrideTables allows a user of the build package to override the special-case rules. The user-provided tables replace the built-in tables. +func OverrideTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) { + IsLabelArg = labelArg + LabelBlacklist = blacklist + IsListArg = listArg + IsSortableListArg = sortableListArg + SortableBlacklist = sortBlacklist + SortableWhitelist = sortWhitelist + NamePriority = namePriority + StripLabelLeadingSlashes = stripLabelLeadingSlashes + ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative +} + +// MergeTables allows a user of the build package to override the special-case rules. The user-provided tables are merged into the built-in tables. 
+func MergeTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) { + for k, v := range labelArg { + IsLabelArg[k] = v + } + for k, v := range blacklist { + LabelBlacklist[k] = v + } + for k, v := range listArg { + IsListArg[k] = v + } + for k, v := range sortableListArg { + IsSortableListArg[k] = v + } + for k, v := range sortBlacklist { + SortableBlacklist[k] = v + } + for k, v := range sortWhitelist { + SortableWhitelist[k] = v + } + for k, v := range namePriority { + NamePriority[k] = v + } + StripLabelLeadingSlashes = stripLabelLeadingSlashes || StripLabelLeadingSlashes + ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative || ShortenAbsoluteLabelsToRelative +} diff --git a/vendor/github.com/bazelbuild/buildtools/tables/testdata/simple_tables.json b/vendor/github.com/bazelbuild/buildtools/tables/testdata/simple_tables.json new file mode 100644 index 00000000000..ae72bd02a37 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/tables/testdata/simple_tables.json @@ -0,0 +1,23 @@ +{ + "i-am-an-unrelated-field": true, + "IsLabelArg": { + "srcs": true + }, + "LabelBlacklist": { + + }, + "IsSortableListArg": { + "srcs": true, + "visibility": true + }, + "SortableBlacklist": { + "genrule.srcs": true + }, + "SortableWhitelist": { + + }, + "NamePriority": { + "name": -1 + }, + "StripLabelLeadingSlashes": true +} diff --git a/vendor/github.com/bazelbuild/buildtools/unused_deps/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/unused_deps/BUILD.bazel new file mode 100644 index 00000000000..64ccc6f5b96 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/unused_deps/BUILD.bazel @@ -0,0 +1,21 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = ["unused_deps.go"], + visibility = 
["//visibility:private"], + deps = [ + "//build:go_default_library", + "//config:go_default_library", + "//deps_proto:go_default_library", + "//edit:go_default_library", + "//extra_actions_base_proto:go_default_library", + "@com_github_golang_protobuf//proto:go_default_library", + ], +) + +go_binary( + name = "unused_deps", + library = ":go_default_library", + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/bazelbuild/buildtools/unused_deps/README.md b/vendor/github.com/bazelbuild/buildtools/unused_deps/README.md new file mode 100644 index 00000000000..00cc76382fc --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/unused_deps/README.md @@ -0,0 +1,35 @@ +# Unused Deps + +unused_deps is a command line tool to determine any unused dependencies +in [java_library](https://docs.bazel.build/versions/master/be/java.html#java_library) +rules. targets. It outputs `buildozer` commands to apply the suggested +prunings. + +## Dependencies + +1. Protobuf go runtime: to download (if not using bazel) +`go get -u github.com/golang/protobuf/{proto,protoc-gen-go}` + + +## Installation + +1. Change directory to the buildifier/unused_deps + +```bash +gopath=$(go env GOPATH) +cd $gopath/src/github.com/bazelbuild/buildtools/unused_deps +``` + +2. Install + +```bash +go install +``` + +## Usage + +```shell +unused_deps TARGET... +``` + +Here, `TARGET` is a space-separated list of Bazel labels, with support for `:all` and `...` diff --git a/vendor/github.com/bazelbuild/buildtools/unused_deps/unused_deps.go b/vendor/github.com/bazelbuild/buildtools/unused_deps/unused_deps.go new file mode 100644 index 00000000000..79f3d1d814d --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/unused_deps/unused_deps.go @@ -0,0 +1,295 @@ +/* +Copyright 2017 Google Inc. All Rights Reserved. 
+Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// The unused_deps binary prints out buildozer commands for removing +// unused java dependencies from java_library bazel rules. +package main + +import ( + "bufio" + "bytes" + "errors" + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "strings" + + "github.com/bazelbuild/buildtools/build" + "github.com/bazelbuild/buildtools/config" + depspb "github.com/bazelbuild/buildtools/deps_proto" + "github.com/bazelbuild/buildtools/edit" + eapb "github.com/bazelbuild/buildtools/extra_actions_base_proto" + "github.com/golang/protobuf/proto" +) + +var ( + buildTool = flag.String("build_tool", config.DefaultBuildTool, config.BuildToolHelp) + extraActionFileName = flag.String("extra_action_file", "", config.ExtraActionFileNameHelp) + outputFileName = flag.String("output_file", "", "used only with extra_action_file") + buildOptions = stringList("extra_build_flags", "Extra build flags to use when building the targets.") + + blazeFlags = []string{"--tool_tag=unused_deps", "--keep_going", "--color=yes", "--curses=yes"} +) + +func stringList(name, help string) func() []string { + f := flag.String(name, "", help) + return func() []string { + if *f == "" { + return nil + } + res := strings.Split(*f, ",") + for i := range res { + res[i] = strings.TrimSpace(res[i]) + } + return res + } +} + +// getJarPath prints the path to the output jar file specified in the extra_action file at path. 
+func getJarPath(path string) (string, error) { + data, err := ioutil.ReadFile(path) + if err != nil { + return "", err + } + i := &eapb.ExtraActionInfo{} + if err := proto.Unmarshal(data, i); err != nil { + return "", err + } + ext, err := proto.GetExtension(i, eapb.E_JavaCompileInfo_JavaCompileInfo) + if err != nil { + return "", err + } + jci, ok := ext.(*eapb.JavaCompileInfo) + if !ok { + return "", errors.New("no JavaCompileInfo in " + path) + } + return jci.GetOutputjar(), nil +} + +// writeUnusedDeps writes the labels of unused direct deps, one per line, to outputFileName. +func writeUnusedDeps(jarPath, outputFileName string) { + depsPath := strings.Replace(jarPath, ".jar", ".jdeps", 1) + paramsPath := jarPath + "-2.params" + file, _ := os.Create(outputFileName) + for dep := range unusedDeps(depsPath, directDepParams(paramsPath)) { + file.WriteString(dep + "\n") + } +} + +func cmdWithStderr(name string, arg ...string) *exec.Cmd { + cmd := exec.Command(name, arg...) + cmd.Stderr = os.Stderr + return cmd +} + +// blazeInfo retrieves the blaze info value for a given key. +func blazeInfo(key string) (value string) { + out, err := cmdWithStderr(*buildTool, "info", key).Output() + if err != nil { + log.Printf("'%s info %s' failed: %s", *buildTool, key, err) + } + return strings.TrimSpace(bytes.NewBuffer(out).String()) +} + +// inputFileName returns a blaze output file name from which to read input. +func inputFileName(blazeBin, pkg, ruleName, extension string) string { + name := fmt.Sprintf("%s/%s/lib%s.%s", blazeBin, pkg, ruleName, extension) // *_library + if _, err := os.Stat(name); err == nil { + return name + } + // lazily let the caller handle it if this doesn't exist + return fmt.Sprintf("%s/%s/%s.%s", blazeBin, pkg, ruleName, extension) // *_{binary,test} +} + +// directDepParams returns --direct_dependency entries from paramsFileName (a jar-2.params file) +// as a map from jar files to labels. 
+func directDepParams(paramsFileName string) (depsByJar map[string]string) { + depsByJar = make(map[string]string) + data, err := ioutil.ReadFile(paramsFileName) + if err != nil { + log.Println(err) + return depsByJar + } + // the classpath param exceeds MaxScanTokenSize, so we scan just this section: + first := bytes.Index(data, []byte("--direct_dependency")) + if first < 0 { + return depsByJar + } + scanner := bufio.NewScanner(bytes.NewReader(data[first:])) + for scanner.Scan() { + if scanner.Text() == "--direct_dependency" { + scanner.Scan() + jar := scanner.Text() + scanner.Scan() + label := scanner.Text() + depsByJar[jar] = label + } + } + if err := scanner.Err(); err != nil { + log.Printf("reading %s: %s", paramsFileName, err) + } + return depsByJar +} + +// unusedDeps returns a set of labels that are unused deps. +// It reads Dependencies proto messages from depsFileName (a jdeps file), which indicate deps used +// at compile time, and returns those values in the depsByJar map that aren't used at compile time. +func unusedDeps(depsFileName string, depsByJar map[string]string) (unusedDeps map[string]bool) { + unusedDeps = make(map[string]bool) + data, err := ioutil.ReadFile(depsFileName) + if err != nil { + log.Println(err) + return unusedDeps + } + dependencies := &depspb.Dependencies{} + if err := proto.Unmarshal(data, dependencies); err != nil { + log.Println(err) + return unusedDeps + } + for _, label := range depsByJar { + unusedDeps[label] = true + } + for _, dependency := range dependencies.Dependency { + if *dependency.Kind == depspb.Dependency_EXPLICIT { + delete(unusedDeps, depsByJar[*dependency.Path]) + } + } + return unusedDeps +} + +// parseBuildFile tries to read and parse the contents of buildFileName. 
+func parseBuildFile(buildFileName string) (buildFile *build.File, err error) { + data, err := ioutil.ReadFile(buildFileName) + if err != nil { + return nil, err + } + return build.Parse(buildFileName, data) +} + +// getDepsExpr tries to parse the content of buildFileName and return the deps Expr for ruleName. +func getDepsExpr(buildFileName string, ruleName string) build.Expr { + buildFile, err := parseBuildFile(buildFileName) + if buildFile == nil { + log.Printf("%s when parsing %s", err, buildFileName) + return nil + } + rule := edit.FindRuleByName(buildFile, ruleName) + if rule == nil { + log.Printf("%s not found in %s", ruleName, buildFileName) + return nil + } + depsExpr := rule.Attr("deps") + if depsExpr == nil { + log.Printf("no deps attribute for %s in %s", ruleName, buildFileName) + } + return depsExpr +} + +// hasRuntimeComment returns true if expr has an EOL comment containing the word "runtime". +// TODO(bazel-team): delete when this comment convention is extinct +func hasRuntimeComment(expr build.Expr) bool { + for _, comment := range expr.Comment().Suffix { + if strings.Contains(strings.ToLower(comment.Token), "runtime") { + return true + } + } + return false +} + +// printCommands prints, for each key in the deps map, a buildozer command +// to remove that entry from the deps attribute of the rule identified by label. +// Returns true if at least one command was printed, or false otherwise. 
+func printCommands(label string, deps map[string]bool) (anyCommandPrinted bool) { + buildFileName, pkg, ruleName := edit.InterpretLabel(label) + depsExpr := getDepsExpr(buildFileName, ruleName) + for _, li := range edit.AllLists(depsExpr) { + for _, elem := range li.List { + for dep := range deps { + str, ok := elem.(*build.StringExpr) + if ok && edit.LabelsEqual(str.Value, dep, pkg) { + if hasRuntimeComment(str) { + fmt.Printf("buildozer 'move deps runtime_deps %s' %s\n", str.Value, label) + } else { + fmt.Printf("buildozer 'remove deps %s' %s\n", str.Value, label) + } + anyCommandPrinted = true + } + } + } + } + return anyCommandPrinted +} + +func usage() { + fmt.Fprintf(os.Stderr, `usage: unused_deps TARGET... + +For Java rules in TARGETs, prints commands to delete deps unused at compile time. +Note these may be used at run time; see documentation for more information. +`) + os.Exit(2) +} + +func main() { + flag.Usage = usage + flag.Parse() + if *extraActionFileName != "" { + jarPath, err := getJarPath(*extraActionFileName) + if err != nil { + log.Fatal(err) + } + writeUnusedDeps(jarPath, *outputFileName) + return + } + targetPatterns := flag.Args() + + queryCmd := append([]string{"query"}, blazeFlags...) + queryCmd = append( + queryCmd, fmt.Sprintf("kind('(java|android)_*', %s)", strings.Join(targetPatterns, " + "))) + + log.Printf("running: %s %s", *buildTool, strings.Join(queryCmd, " ")) + queryOut, err := cmdWithStderr(*buildTool, queryCmd...).Output() + if err != nil { + log.Print(err) + } + if len(queryOut) == 0 { + fmt.Fprintln(os.Stderr, "found no targets of kind (java|android)_*") + usage() + } + + buildCmd := append(append([]string{"build"}, blazeFlags...), config.DefaultExtraBuildFlags...) + buildCmd = append(buildCmd, buildOptions()...) + + blazeArgs := append(buildCmd, targetPatterns...) 
+ + log.Printf("running: %s %s", *buildTool, strings.Join(blazeArgs, " ")) + cmdWithStderr(*buildTool, blazeArgs...).Run() + blazeBin := blazeInfo(config.DefaultBinDir) + fmt.Fprintf(os.Stderr, "\n") // vertical space between build output and unused_deps output + + anyCommandPrinted := false + for _, label := range strings.Fields(string(queryOut)) { + _, pkg, ruleName := edit.InterpretLabel(label) + depsByJar := directDepParams(inputFileName(blazeBin, pkg, ruleName, "jar-2.params")) + depsToRemove := unusedDeps(inputFileName(blazeBin, pkg, ruleName, "jdeps"), depsByJar) + // TODO(bazel-team): instead of printing, have buildifier-like modes? + anyCommandPrinted = printCommands(label, depsToRemove) || anyCommandPrinted + } + if !anyCommandPrinted { + fmt.Fprintln(os.Stderr, "No unused deps found.") + } +} diff --git a/vendor/github.com/bazelbuild/buildtools/wspace/BUILD.bazel b/vendor/github.com/bazelbuild/buildtools/wspace/BUILD.bazel new file mode 100644 index 00000000000..3b5198c90f5 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/wspace/BUILD.bazel @@ -0,0 +1,18 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = ["workspace.go"], + visibility = ["//visibility:public"], + deps = [ + "//build:go_default_library", + "//file:go_default_library", + ], +) + +go_test( + name = "go_default_test", + size = "small", + srcs = ["workspace_test.go"], + library = ":go_default_library", +) diff --git a/vendor/github.com/bazelbuild/buildtools/wspace/workspace.go b/vendor/github.com/bazelbuild/buildtools/wspace/workspace.go new file mode 100644 index 00000000000..5680d5874fa --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/wspace/workspace.go @@ -0,0 +1,114 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +// Package wspace provides a method to find the root of the bazel tree. +package wspace + +import ( + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/bazelbuild/buildtools/build" +) + +const workspaceFile = "WORKSPACE" + +func alwaysTrue(fi os.FileInfo) bool { + return true +} + +var repoRootFiles = map[string]func(os.FileInfo) bool{ + workspaceFile: alwaysTrue, + ".buckconfig": alwaysTrue, + "pants": func(fi os.FileInfo) bool { + return fi.Mode()&os.ModeType == 0 && fi.Mode()&0100 == 0100 + }, +} + +// findContextPath finds the context path inside of a WORKSPACE-rooted source tree. +func findContextPath(rootDir string) (string, error) { + if rootDir == "" { + return os.Getwd() + } + return rootDir, nil +} + +// FindWorkspaceRoot splits the current code context (the rootDir if present, +// the working directory if not.) It returns the path of the directory +// containing the WORKSPACE file, and the rest. +func FindWorkspaceRoot(rootDir string) (root string, rest string) { + wd, err := findContextPath(rootDir) + if err != nil { + return "", "" + } + if root, err = Find(wd); err != nil { + return "", "" + } + if len(wd) == len(root) { + return root, "" + } + return root, wd[len(root)+1:] +} + +// Find searches from the given dir and up for the WORKSPACE file +// returning the directory containing it, or an error if none found in the tree. 
+func Find(dir string) (string, error) { + if dir == "" || dir == "/" || dir == "." { + return "", os.ErrNotExist + } + for repoRootFile, fiFunc := range repoRootFiles { + if fi, err := os.Stat(filepath.Join(dir, repoRootFile)); err == nil && fiFunc(fi) { + return dir, nil + } else if !os.IsNotExist(err) { + return "", err + } + } + return Find(filepath.Dir(dir)) +} + +// FindRepoBuildFiles parses the WORKSPACE to find BUILD files for non-Bazel +// external repositories, specifically those defined by one of these rules: +// new_local_repository(), new_git_repository(), new_http_archive() +func FindRepoBuildFiles(root string) (map[string]string, error) { + ws := filepath.Join(root, workspaceFile) + kinds := []string{ + "new_local_repository", + "new_git_repository", + "new_http_archive", + } + data, err := ioutil.ReadFile(ws) + if err != nil { + return nil, err + } + ast, err := build.Parse(ws, data) + if err != nil { + return nil, err + } + files := make(map[string]string) + for _, kind := range kinds { + for _, r := range ast.Rules(kind) { + buildFile := r.AttrString("build_file") + if buildFile == "" { + continue + } + buildFile = strings.Replace(buildFile, ":", "/", -1) + files[r.Name()] = filepath.Join(root, buildFile) + } + } + return files, nil +} diff --git a/vendor/github.com/bazelbuild/buildtools/wspace/workspace_test.go b/vendor/github.com/bazelbuild/buildtools/wspace/workspace_test.go new file mode 100644 index 00000000000..6b3cacb78e2 --- /dev/null +++ b/vendor/github.com/bazelbuild/buildtools/wspace/workspace_test.go @@ -0,0 +1,114 @@ +/* +Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +package wspace + +import ( + "io/ioutil" + "os" + "path/filepath" + "reflect" + "testing" +) + +type testCase struct { + input string + expectedRoot, expectedRest string +} + +func runBasicTestWithRepoRootFile(t *testing.T, repoRootFile string) { + tmp, err := ioutil.TempDir("", "") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(tmp) + if err := os.MkdirAll(filepath.Join(tmp, "a", "b", "c"), 0755); err != nil { + t.Fatal(err) + } + if err := ioutil.WriteFile(filepath.Join(tmp, repoRootFile), nil, 0755); err != nil { + t.Fatal(err) + } + if err := ioutil.WriteFile(filepath.Join(tmp, "a", "b", repoRootFile), nil, 0755); err != nil { + t.Fatal(err) + } + + for _, tc := range []testCase{ + {tmp, tmp, ""}, + {filepath.Join(tmp, "a"), tmp, "a"}, + {filepath.Join(tmp, "a", "b"), filepath.Join(tmp, "a", "b"), ""}, + {filepath.Join(tmp, "a", "b", "c"), filepath.Join(tmp, "a", "b"), "c"}, + {"a", "", ""}, // error case + } { + root, rest := FindWorkspaceRoot(tc.input) + if root != tc.expectedRoot || rest != tc.expectedRest { + t.Errorf("FindWorkspaceRoot(%q) = %q, %q; want %q, %q", tc.input, root, rest, tc.expectedRoot, tc.expectedRest) + } + } +} + +func TestBasic(t *testing.T) { + runBasicTestWithRepoRootFile(t, ".buckconfig") + runBasicTestWithRepoRootFile(t, workspaceFile) +} + +func TestFindRepoBuildfiles(t *testing.T) { + tmp, err := ioutil.TempDir("", "") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(tmp) + workspace := []byte(` +new_git_repository( + name = "a", + build_file = "a.BUILD", +) +new_http_archive( + name = "b", + build_file = 
"b.BUILD", +) +new_local_repository( + name = "c", + build_file = "c.BUILD", +) +git_repository( + name = "d", + build_file = "d.BUILD", +) +new_git_repository( + name = "e", + build_file_content = "n/a", +) +new_http_archive( + name = "f", + build_file = "//third_party:f.BUILD", +) +`) + if err := ioutil.WriteFile(filepath.Join(tmp, workspaceFile), workspace, 0755); err != nil { + t.Fatal(err) + } + files, err := FindRepoBuildFiles(tmp) + if err != nil { + t.Fatal(err) + } + expected := map[string]string{ + "a": filepath.Join(tmp, "a.BUILD"), + "b": filepath.Join(tmp, "b.BUILD"), + "c": filepath.Join(tmp, "c.BUILD"), + "f": filepath.Join(tmp, "third_party/f.BUILD"), + } + if !reflect.DeepEqual(files, expected) { + t.Errorf("FileRepoBuildFiles(`%s`) = %q; want %q", workspace, files, expected) + } +} diff --git a/vendor/github.com/coreos/etcd/Documentation/learning/auth_design.md b/vendor/github.com/coreos/etcd/Documentation/learning/auth_design.md index a8c774e6fdc..52c979731bf 100644 --- a/vendor/github.com/coreos/etcd/Documentation/learning/auth_design.md +++ b/vendor/github.com/coreos/etcd/Documentation/learning/auth_design.md @@ -1,77 +1,77 @@ -# etcd v3 authentication design - -## Why not reuse the v2 auth system? - -The v3 protocol uses gRPC as its transport instead of a RESTful interface like v2. This new protocol provides an opportunity to iterate on and improve the v2 design. For example, v3 auth has connection based authentication, rather than v2's slower per-request authentication. Additionally, v2 auth's semantics tend to be unwieldy in practice with respect to reasoning about consistency, which will be described in the next sections. For v3, there is a well-defined description and implementation of the authentication mechanism which fixes the deficiencies in the v2 auth system. 
- -### Functionality requirements - -* Per connection authentication, not per request - * User ID + password based authentication implemented for the gRPC API - * Authentication must be refreshed after auth policy changes -* Its functionality should be as simple and useful as v2 - * v3 provides a flat key space, unlike the directory structure of v2. Permission checking will be provided as interval matching. -* It should have stronger consistency guarantees than v2 auth - -### Main required changes - -* A client must create a dedicated connection only for authentication before sending authenticated requests -* Add permission information (user ID and authorized revision) to the Raft commands (`etcdserverpb.InternalRaftRequest`) -* Every request is permission checked in the state machine layer, rather than API layer - -### Permission metadata consistency - -The metadata for auth should also be stored and managed in the storage controlled by etcd's Raft protocol like other data stored in etcd. It is required for not sacrificing availability and consistency of the entire etcd cluster. If reading or writing the metadata (e.g. permission information) needs an agreement of every node (more than quorum), single node failure can stop the entire cluster. Requiring all nodes to agree at once means that checking ordinary read/write requests cannot be completed if any cluster member is down, even if the cluster has an available quorum. This unanimous scheme ultimately degrades cluster availability; quorum based consensus from raft should suffice since agreement follows from consistent ordering. - -The authentication mechanism in the etcd v2 protocol has a tricky part because the metadata consistency should work as in the above, but does not: each permission check is processed by the etcd member that receives the client request (etcdserver/api/v2http/client.go), including follower members. Therefore, it's possible the check may be based on stale metadata. 
- - -This staleness means that auth configuration cannot be reflected as soon as operators execute etcdctl. Therefore there is no way to know how long the stale metadata is active. Practically, the configuration change is reflected immediately after the command execution. However, in some cases of heavy load, the inconsistent state can be prolonged and it might result in counter-intuitive situations for users and developers. It requires a workaround like this: https://github.com/coreos/etcd/pull/4317#issuecomment-179037582 - -### Inconsistent permissions are unsafe for linearized requests - -Inconsistent authentication state is most serious for writes. Even if an operator disables write on a user, if the write is only ordered with respect to the key value store but not the authentication system, it's possible the write will complete successfully. Without ordering on both the auth store and the key-value store, the system will be susceptible to stale permission attacks. - -Therefore, the permission checking logic should be added to the state machine of etcd. Each state machine should check the requests based on its permission information in the apply phase (so the auth information must not be stale). - -## Design and implementation - -### Authentication - -At first, a client must create a gRPC connection only to authenticate its user ID and password. An etcd server will respond with an authentication reply. The reponse will be an authentication token on success or an error on failure. The client can use its authentication token to present its credentials to etcd when making API requests. - -The client connection used to request the authentication token is typically thrown away; it cannot carry the new token's credentials. This is because gRPC doesn't provide a way for adding per RPC credential after creation of the connection (calling `grpc.Dial()`). Therefore, a client cannot assign a token to its connection that is obtained through the connection. 
The client needs a new connection for using the token. - -#### Notes on the implementation of `Authenticate()` RPC - -`Authenticate()` RPC generates an authentication token based on a given user name and password. etcd saves and checks a configured password and a given password using Go's `bcrypt` package. By design, `bcrypt`'s password checking mechanism is computationally expensive, taking nearly 100ms on an ordinary x64 server. Therefore, performing this check in the state machine apply phase would cause performance trouble: the entire etcd cluster can only serve almost 10 `Authenticate()` requests per second. - -For good performance, the v3 auth mechanism checks passwords in etcd's API layer, where it can be parallelized outside of raft. However, this can lead to potential time-of-check/time-of-use (TOCTOU) permission lapses: -1. client A sends a request `Authenticate()` -1. the API layer processes the password checking part of `Authenticate()` -1. another client B sends a request of `ChangePassword()` and the server completes it -1. the state machine layer processes the part of getting a revision number for the `Authenticate()` from A -1. the server returns a success to A -1. now A is authenticated on an obsolete password - -For avoiding such a situation, the API layer performs *version number validation* based on the revision number of the auth store. During password checking, the API layer saves the revision number of auth store. After successful password checking, the API layer compares the saved revision number and the latest revision number. If the numbers differ, it means someone else updated the auth metadata. So it retries the checking. With this mechanism, the successful password checking based on the obsolete password can be avoided. - -### Resolving a token in the API layer - -After authenticating with `Authenticate()`, a client can create a gRPC connection as it would without auth. 
In addition to the existing initialization process, the client must associate the token with the newly created connection. `grpc.WithPerRPCCredentials()` provides the functionality for this purpose. - -Every authenticated request from the client has a token. The token can be obtained with `grpc.metadata.FromIncomingContext()` in the server side. The server can obtain who is issuing the request and when the user was authorized. The information will be filled by the API layer in the header (`etcdserverpb.RequestHeader.Username` and `etcdserverpb.RequestHeader.AuthRevision`) of a raft log entry (`etcdserverpb.InternalRaftRequest`). - -### Checking permission in the state machine - -The auth info in `etcdserverpb.RequestHeader` is checked in the apply phase of the state machine. This step checks the user is granted permission to requested keys on the latest revision of auth store. - -### Two types of tokens: simple and JWT - -There are two kinds of token types: simple and JWT. The simple token isn't designed for production use cases. Its tokens aren't cryptographically signed and servers must statefully track token-user correspondence; it is meant for development testing. JWT tokens should be used for production deployments since it is cryptographically signed and verified. From the implementation perspective, JWT is stateless. Its token can include metadata including username and revision, so servers don't need to remember correspondence between tokens and the metadata. - -## Notes on the difference between KVS models and file system models - -etcd v3 is a KVS, not a file system. So the permissions can be granted to the users in form of an exact key name or a key range like `["start key", "end key")`. It means that granting a permission of a nonexistent key is possible. Users should care about unintended permission granting. In a case of file system like system (e.g. Chubby or ZooKeeper), an inode like data structure can include the permission information. 
So granting permission to a nonexist key won't be possible (except the case of sticky bits). - -The etcd v3 model requires multiple lookup of the metadata unlike the file system like systems. The worst case lookup cost will be sum the user's total granted keys and intervals. The cost cannot be avoided because v3's flat key space is completely different from Unix's file system model (every inode includes permission metadata). Practically the cost won’t be a serious problem because the metadata is small enough to benefit from caching. +# etcd v3 authentication design + +## Why not reuse the v2 auth system? + +The v3 protocol uses gRPC as its transport instead of a RESTful interface like v2. This new protocol provides an opportunity to iterate on and improve the v2 design. For example, v3 auth has connection based authentication, rather than v2's slower per-request authentication. Additionally, v2 auth's semantics tend to be unwieldy in practice with respect to reasoning about consistency, which will be described in the next sections. For v3, there is a well-defined description and implementation of the authentication mechanism which fixes the deficiencies in the v2 auth system. + +### Functionality requirements + +* Per connection authentication, not per request + * User ID + password based authentication implemented for the gRPC API + * Authentication must be refreshed after auth policy changes +* Its functionality should be as simple and useful as v2 + * v3 provides a flat key space, unlike the directory structure of v2. Permission checking will be provided as interval matching. 
+* It should have stronger consistency guarantees than v2 auth + +### Main required changes + +* A client must create a dedicated connection only for authentication before sending authenticated requests +* Add permission information (user ID and authorized revision) to the Raft commands (`etcdserverpb.InternalRaftRequest`) +* Every request is permission checked in the state machine layer, rather than API layer + +### Permission metadata consistency + +The metadata for auth should also be stored and managed in the storage controlled by etcd's Raft protocol like other data stored in etcd. It is required for not sacrificing availability and consistency of the entire etcd cluster. If reading or writing the metadata (e.g. permission information) needs an agreement of every node (more than quorum), single node failure can stop the entire cluster. Requiring all nodes to agree at once means that checking ordinary read/write requests cannot be completed if any cluster member is down, even if the cluster has an available quorum. This unanimous scheme ultimately degrades cluster availability; quorum based consensus from raft should suffice since agreement follows from consistent ordering. + +The authentication mechanism in the etcd v2 protocol has a tricky part because the metadata consistency should work as in the above, but does not: each permission check is processed by the etcd member that receives the client request (etcdserver/api/v2http/client.go), including follower members. Therefore, it's possible the check may be based on stale metadata. + + +This staleness means that auth configuration cannot be reflected as soon as operators execute etcdctl. Therefore there is no way to know how long the stale metadata is active. Practically, the configuration change is reflected immediately after the command execution. However, in some cases of heavy load, the inconsistent state can be prolonged and it might result in counter-intuitive situations for users and developers. 
It requires a workaround like this: https://github.com/coreos/etcd/pull/4317#issuecomment-179037582 + +### Inconsistent permissions are unsafe for linearized requests + +Inconsistent authentication state is most serious for writes. Even if an operator disables write on a user, if the write is only ordered with respect to the key value store but not the authentication system, it's possible the write will complete successfully. Without ordering on both the auth store and the key-value store, the system will be susceptible to stale permission attacks. + +Therefore, the permission checking logic should be added to the state machine of etcd. Each state machine should check the requests based on its permission information in the apply phase (so the auth information must not be stale). + +## Design and implementation + +### Authentication + +At first, a client must create a gRPC connection only to authenticate its user ID and password. An etcd server will respond with an authentication reply. The response will be an authentication token on success or an error on failure. The client can use its authentication token to present its credentials to etcd when making API requests. + +The client connection used to request the authentication token is typically thrown away; it cannot carry the new token's credentials. This is because gRPC doesn't provide a way for adding per RPC credential after creation of the connection (calling `grpc.Dial()`). Therefore, a client cannot assign a token to its connection that is obtained through the connection. The client needs a new connection for using the token. + +#### Notes on the implementation of `Authenticate()` RPC + +`Authenticate()` RPC generates an authentication token based on a given user name and password. etcd saves and checks a configured password and a given password using Go's `bcrypt` package. By design, `bcrypt`'s password checking mechanism is computationally expensive, taking nearly 100ms on an ordinary x64 server. 
Therefore, performing this check in the state machine apply phase would cause performance trouble: the entire etcd cluster can only serve almost 10 `Authenticate()` requests per second. + +For good performance, the v3 auth mechanism checks passwords in etcd's API layer, where it can be parallelized outside of raft. However, this can lead to potential time-of-check/time-of-use (TOCTOU) permission lapses: +1. client A sends a request `Authenticate()` +1. the API layer processes the password checking part of `Authenticate()` +1. another client B sends a request of `ChangePassword()` and the server completes it +1. the state machine layer processes the part of getting a revision number for the `Authenticate()` from A +1. the server returns a success to A +1. now A is authenticated on an obsolete password + +For avoiding such a situation, the API layer performs *version number validation* based on the revision number of the auth store. During password checking, the API layer saves the revision number of auth store. After successful password checking, the API layer compares the saved revision number and the latest revision number. If the numbers differ, it means someone else updated the auth metadata. So it retries the checking. With this mechanism, the successful password checking based on the obsolete password can be avoided. + +### Resolving a token in the API layer + +After authenticating with `Authenticate()`, a client can create a gRPC connection as it would without auth. In addition to the existing initialization process, the client must associate the token with the newly created connection. `grpc.WithPerRPCCredentials()` provides the functionality for this purpose. + +Every authenticated request from the client has a token. The token can be obtained with `grpc.metadata.FromIncomingContext()` in the server side. The server can obtain who is issuing the request and when the user was authorized. 
The information will be filled by the API layer in the header (`etcdserverpb.RequestHeader.Username` and `etcdserverpb.RequestHeader.AuthRevision`) of a raft log entry (`etcdserverpb.InternalRaftRequest`). + +### Checking permission in the state machine + +The auth info in `etcdserverpb.RequestHeader` is checked in the apply phase of the state machine. This step checks the user is granted permission to requested keys on the latest revision of auth store. + +### Two types of tokens: simple and JWT + +There are two kinds of token types: simple and JWT. The simple token isn't designed for production use cases. Its tokens aren't cryptographically signed and servers must statefully track token-user correspondence; it is meant for development testing. JWT tokens should be used for production deployments since it is cryptographically signed and verified. From the implementation perspective, JWT is stateless. Its token can include metadata including username and revision, so servers don't need to remember correspondence between tokens and the metadata. + +## Notes on the difference between KVS models and file system models + +etcd v3 is a KVS, not a file system. So the permissions can be granted to the users in form of an exact key name or a key range like `["start key", "end key")`. It means that granting a permission of a nonexistent key is possible. Users should care about unintended permission granting. In a case of file system like system (e.g. Chubby or ZooKeeper), an inode like data structure can include the permission information. So granting permission to a nonexistent key won't be possible (except the case of sticky bits). + +The etcd v3 model requires multiple lookup of the metadata unlike the file system like systems. The worst case lookup cost will be the sum of the user's total granted keys and intervals. The cost cannot be avoided because v3's flat key space is completely different from Unix's file system model (every inode includes permission metadata). 
Practically the cost won’t be a serious problem because the metadata is small enough to benefit from caching. diff --git a/vendor/github.com/kubernetes/repo-infra/.travis.yml b/vendor/github.com/kubernetes/repo-infra/.travis.yml index 2670decd6b7..a483f0731d4 100644 --- a/vendor/github.com/kubernetes/repo-infra/.travis.yml +++ b/vendor/github.com/kubernetes/repo-infra/.travis.yml @@ -13,13 +13,13 @@ before_install: install: - sudo apt-get install bazel - go get -u github.com/alecthomas/gometalinter - - go get -u github.com/bazelbuild/buildifier/buildifier + - go get -u github.com/bazelbuild/buildtools/buildifier - gometalinter --install - go install ./... script: - verify/verify-boilerplate.sh --rootdir="$GOPATH/src/k8s.io/repo-infra" -v - verify/verify-go-src.sh --rootdir "$GOPATH/src/k8s.io/repo-infra" -v - - buildifier -mode=check $(find . -name BUILD -o -name '*.bzl' -type f) + - buildifier -mode=check $(find . -name BUILD -o -name '*.bzl' -type f -not -wholename '*/vendor/*') - kazel --print-diff --validate - bazel build //... diff --git a/vendor/github.com/kubernetes/repo-infra/Gopkg.lock b/vendor/github.com/kubernetes/repo-infra/Gopkg.lock new file mode 100644 index 00000000000..b2a82ec6db4 --- /dev/null +++ b/vendor/github.com/kubernetes/repo-infra/Gopkg.lock @@ -0,0 +1,27 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + branch = "master" + name = "github.com/bazelbuild/buildtools" + packages = ["build","tables"] + revision = "1a9c38e0df9397d033a1ca535596de5a7c1cf18f" + +[[projects]] + branch = "master" + name = "github.com/golang/glog" + packages = ["."] + revision = "23def4e6c14b4da8ac2ed8007337bc5eb5007998" + +[[projects]] + branch = "master" + name = "golang.org/x/build" + packages = ["pargzip"] + revision = "125f04e1fc4b4cbfed95e5dd72a435fcb3847608" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "8da72c740ea6e29bad8c209f9bb6c73398cff00fe62ff07d83d20a3c74a3e91d" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/kubernetes/repo-infra/Gopkg.toml b/vendor/github.com/kubernetes/repo-infra/Gopkg.toml new file mode 100644 index 00000000000..dcdc57ed78a --- /dev/null +++ b/vendor/github.com/kubernetes/repo-infra/Gopkg.toml @@ -0,0 +1,12 @@ + +[[constraint]] + branch = "master" + name = "github.com/bazelbuild/buildtools" + +[[constraint]] + branch = "master" + name = "github.com/golang/glog" + +[[constraint]] + branch = "master" + name = "golang.org/x/build" diff --git a/vendor/github.com/kubernetes/repo-infra/WORKSPACE b/vendor/github.com/kubernetes/repo-infra/WORKSPACE index ffe33ffa587..0dcdea19e13 100644 --- a/vendor/github.com/kubernetes/repo-infra/WORKSPACE +++ b/vendor/github.com/kubernetes/repo-infra/WORKSPACE @@ -2,7 +2,7 @@ workspace(name = "io_kubernetes_build") git_repository( name = "io_bazel_rules_go", - commit = "fabe06345cff38edfe49a18ec3705e781698e98c", + commit = "fd3021297ae02a86c32adf2b52fd7fe77d033282", remote = "https://github.com/bazelbuild/rules_go.git", ) diff --git a/vendor/github.com/kubernetes/repo-infra/code-of-conduct.md b/vendor/github.com/kubernetes/repo-infra/code-of-conduct.md new file mode 100644 index 00000000000..0d15c00cf32 --- /dev/null +++ 
b/vendor/github.com/kubernetes/repo-infra/code-of-conduct.md @@ -0,0 +1,3 @@ +# Kubernetes Community Code of Conduct + +Please refer to our [Kubernetes Community Code of Conduct](https://git.k8s.io/community/code-of-conduct.md) diff --git a/vendor/github.com/kubernetes/repo-infra/defs/go.bzl b/vendor/github.com/kubernetes/repo-infra/defs/go.bzl index 9d60b874932..1a3d01b0e66 100644 --- a/vendor/github.com/kubernetes/repo-infra/defs/go.bzl +++ b/vendor/github.com/kubernetes/repo-infra/defs/go.bzl @@ -63,7 +63,7 @@ def _go_genrule_impl(ctx): for dep in ctx.attr.go_deps: lib = dep[GoLibrary] - all_srcs += lib.srcs + all_srcs += lib.package.srcs for transitive_lib in lib.transitive: all_srcs += transitive_lib.srcs diff --git a/vendor/github.com/kubernetes/repo-infra/kazel/BUILD b/vendor/github.com/kubernetes/repo-infra/kazel/BUILD index f938cdf46f6..a6eb7445698 100644 --- a/vendor/github.com/kubernetes/repo-infra/kazel/BUILD +++ b/vendor/github.com/kubernetes/repo-infra/kazel/BUILD @@ -27,7 +27,7 @@ go_library( importpath = "k8s.io/repo-infra/kazel", tags = ["automanaged"], deps = [ - "//vendor/github.com/bazelbuild/buildifier/core:go_default_library", + "//vendor/github.com/bazelbuild/buildtools/build:go_default_library", "//vendor/github.com/golang/glog:go_default_library", ], ) diff --git a/vendor/github.com/kubernetes/repo-infra/kazel/kazel.go b/vendor/github.com/kubernetes/repo-infra/kazel/kazel.go index 492587f0b77..b1b4e4aad29 100644 --- a/vendor/github.com/kubernetes/repo-infra/kazel/kazel.go +++ b/vendor/github.com/kubernetes/repo-infra/kazel/kazel.go @@ -30,7 +30,7 @@ import ( "sort" "strings" - bzl "github.com/bazelbuild/buildifier/core" + bzl "github.com/bazelbuild/buildtools/build" "github.com/golang/glog" ) diff --git a/vendor/github.com/kubernetes/repo-infra/kazel/sourcerer.go 
b/vendor/github.com/kubernetes/repo-infra/kazel/sourcerer.go index e89b0f87ee1..e497333bef8 100644 --- a/vendor/github.com/kubernetes/repo-infra/kazel/sourcerer.go +++ b/vendor/github.com/kubernetes/repo-infra/kazel/sourcerer.go @@ -21,7 +21,7 @@ import ( "io/ioutil" "path/filepath" - bzl "github.com/bazelbuild/buildifier/core" + bzl "github.com/bazelbuild/buildtools/build" ) const ( diff --git a/vendor/github.com/kubernetes/repo-infra/verify/go_install_from_commit.sh b/vendor/github.com/kubernetes/repo-infra/verify/go_install_from_commit.sh new file mode 100755 index 00000000000..ee6fd0d9c1f --- /dev/null +++ b/vendor/github.com/kubernetes/repo-infra/verify/go_install_from_commit.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +PKG=$1 +COMMIT=$2 +export GOPATH=$3 +export GOBIN="$GOPATH/bin" + +go get -d -u "${PKG}" +cd "${GOPATH}/src/${PKG}" +git checkout -q "${COMMIT}" +go install "${PKG}" diff --git a/vendor/github.com/kubernetes/repo-infra/verify/update-bazel.sh b/vendor/github.com/kubernetes/repo-infra/verify/update-bazel.sh new file mode 100755 index 00000000000..2a0cac6a281 --- /dev/null +++ b/vendor/github.com/kubernetes/repo-infra/verify/update-bazel.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# Copyright 2016 The Kubernetes Authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +REPOINFRA_ROOT=$(git rev-parse --show-toplevel) +# https://github.com/kubernetes/test-infra/issues/5699#issuecomment-348350792 +cd ${REPOINFRA_ROOT} +TMP_GOPATH=$(mktemp -d) + +# no unit tests in vendor +# previously we used godeps which did this, but `dep` does not handle this +# properly yet. some of these tests don't build well. see: +# ref: https://github.com/kubernetes/test-infra/pull/5411 +find ${REPOINFRA_ROOT}/vendor/ -name "*_test.go" -delete + +# manually remove BUILD file for github.com/bazelbuild/buildtools/BUILD.bazel if it +# exists; there is a specific test_suite rule that breaks importing +rm -f ${REPOINFRA_ROOT}/vendor/github.com/bazelbuild/buildtools/BUILD.bazel + +GOBIN="${TMP_GOPATH}/bin" go get github.com/kubernetes/repo-infra/kazel + +"${REPOINFRA_ROOT}/verify/go_install_from_commit.sh" \ + github.com/bazelbuild/bazel-gazelle/cmd/gazelle \ + 0.8 \ + "${TMP_GOPATH}" + +touch "${REPOINFRA_ROOT}/vendor/BUILD" + +"${TMP_GOPATH}/bin/gazelle" fix \ + -build_file_name=BUILD,BUILD.bazel \ + -external=vendored \ + -mode=fix \ + -repo_root="${REPOINFRA_ROOT}" + +"${TMP_GOPATH}/bin/kazel" -root="${REPOINFRA_ROOT}" diff --git a/vendor/github.com/kubernetes/repo-infra/verify/verify-bazel.sh b/vendor/github.com/kubernetes/repo-infra/verify/verify-bazel.sh new file mode 100755 index 
00000000000..5bc8efea7cc --- /dev/null +++ b/vendor/github.com/kubernetes/repo-infra/verify/verify-bazel.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +REPOINFRA_ROOT=$(git rev-parse --show-toplevel) +TMP_GOPATH=$(mktemp -d) + +GOBIN="${TMP_GOPATH}/bin" go get github.com/kubernetes/repo-infra/kazel + +"${REPOINFRA_ROOT}/verify/go_install_from_commit.sh" \ + github.com/bazelbuild/bazel-gazelle/cmd/gazelle \ + 0.8 \ + "${TMP_GOPATH}" + +touch "${REPOINFRA_ROOT}/vendor/BUILD" + +gazelle_diff=$("${TMP_GOPATH}/bin/gazelle" fix \ + -build_file_name=BUILD,BUILD.bazel \ + -external=vendored \ + -mode=diff \ + -repo_root="${REPOINFRA_ROOT}") + +kazel_diff=$("${TMP_GOPATH}/bin/kazel" \ + -dry-run \ + -print-diff \ + -root="${REPOINFRA_ROOT}") + +# check if there are vendor/*_test.go +# previously we used godeps which did this, but `dep` does not handle this +# properly yet. some of these tests don't build well. 
see: +# ref: https://github.com/kubernetes/test-infra/pull/5411 +vendor_tests=$(find ${REPOINFRA_ROOT}/vendor/ -name "*_test.go" | wc -l) + +if [[ -n "${gazelle_diff}" || -n "${kazel_diff}" || "${vendor_tests}" -ne "0" ]]; then + echo "${gazelle_diff}" + echo "${kazel_diff}" + echo "number of vendor/*_test.go: ${vendor_tests} (want: 0)" + echo + echo "Run ./verify/update-bazel.sh" + exit 1 +fi diff --git a/vendor/github.com/onsi/gomega/matchers/test_data/xml/sample_05.xml b/vendor/github.com/onsi/gomega/matchers/test_data/xml/sample_05.xml index 2dac05888b9..de15a6a558c 100644 --- a/vendor/github.com/onsi/gomega/matchers/test_data/xml/sample_05.xml +++ b/vendor/github.com/onsi/gomega/matchers/test_data/xml/sample_05.xml @@ -1,211 +1,211 @@ - - - - Empire Burlesque - Bob Dylan - USA - Columbia - 10.90 - 1985 - - - Hide your heart - Bonnie Tyler - UK - CBS Records - 9.90 - 1988 - - - Greatest Hits - Dolly Parton - USA - RCA - 9.90 - 1982 - - - Still got the blues - Gary Moore - UK - Virgin records - 10.20 - 1990 - - - Eros - Eros Ramazzotti - EU - BMG - 9.90 - 1997 - - - One night only - Bee Gees - UK - Polydor - 10.90 - 1998 - - - Sylvias Mother - Dr.Hook - UK - CBS - 8.10 - 1973 - - - Maggie May - Rod Stewart - UK - Pickwick - 8.50 - 1990 - - - Romanza - Andrea Bocelli - EU - Polydor - 10.80 - 1996 - - - When a man loves a woman - Percy Sledge - USA - Atlantic - 8.70 - 1987 - - - Black angel - Savage Rose - EU - Mega - 10.90 - 1995 - - - 1999 Grammy Nominees - Many - USA - Grammy - 10.20 - 1999 - - - For the good times - Kenny Rogers - UK - Mucik Master - 8.70 - 1995 - - - Big Willie style - Will Smith - USA - Columbia - 9.90 - 1997 - - - Tupelo Honey - Van Morrison - UK - Polydor - 8.20 - 1971 - - - Soulsville - Jorn Hoel - Norway - WEA - 7.90 - 1996 - - - The very best of - Cat Stevens - UK - Island - 8.90 - 1990 - - - Stop - Sam Brown - UK - A and M - 8.90 - 1988 - - - Bridge of Spies - T'Pau - UK - Siren - 7.90 - 1987 - - 
- Private Dancer - Tina Turner - UK - Capitol - 8.90 - 1983 - - - Midt om natten - Kim Larsen - EU - Medley - 7.80 - 1983 - - - Pavarotti Gala Concert - Luciano Pavarotti - UK - DECCA - 9.90 - 1991 - - - The dock of the bay - Otis Redding - USA - Stax Records - 7.90 - 1968 - - - Picture book - Simply Red - EU - Elektra - 7.20 - 1985 - - - Red - The Communards - UK - London - 7.80 - 1987 - - - Unchain my heart - Joe Cocker - USA - EMI - 8.20 - 1987 - - + + + + Empire Burlesque + Bob Dylan + USA + Columbia + 10.90 + 1985 + + + Hide your heart + Bonnie Tyler + UK + CBS Records + 9.90 + 1988 + + + Greatest Hits + Dolly Parton + USA + RCA + 9.90 + 1982 + + + Still got the blues + Gary Moore + UK + Virgin records + 10.20 + 1990 + + + Eros + Eros Ramazzotti + EU + BMG + 9.90 + 1997 + + + One night only + Bee Gees + UK + Polydor + 10.90 + 1998 + + + Sylvias Mother + Dr.Hook + UK + CBS + 8.10 + 1973 + + + Maggie May + Rod Stewart + UK + Pickwick + 8.50 + 1990 + + + Romanza + Andrea Bocelli + EU + Polydor + 10.80 + 1996 + + + When a man loves a woman + Percy Sledge + USA + Atlantic + 8.70 + 1987 + + + Black angel + Savage Rose + EU + Mega + 10.90 + 1995 + + + 1999 Grammy Nominees + Many + USA + Grammy + 10.20 + 1999 + + + For the good times + Kenny Rogers + UK + Mucik Master + 8.70 + 1995 + + + Big Willie style + Will Smith + USA + Columbia + 9.90 + 1997 + + + Tupelo Honey + Van Morrison + UK + Polydor + 8.20 + 1971 + + + Soulsville + Jorn Hoel + Norway + WEA + 7.90 + 1996 + + + The very best of + Cat Stevens + UK + Island + 8.90 + 1990 + + + Stop + Sam Brown + UK + A and M + 8.90 + 1988 + + + Bridge of Spies + T'Pau + UK + Siren + 7.90 + 1987 + + + Private Dancer + Tina Turner + UK + Capitol + 8.90 + 1983 + + + Midt om natten + Kim Larsen + EU + Medley + 7.80 + 1983 + + + Pavarotti Gala Concert + Luciano Pavarotti + UK + DECCA + 9.90 + 1991 + + + The dock of the bay + Otis Redding + USA + Stax Records + 7.90 + 1968 + + + Picture book + Simply Red + EU + Elektra + 7.20 + 
1985 + + + Red + The Communards + UK + London + 7.80 + 1987 + + + Unchain my heart + Joe Cocker + USA + EMI + 8.20 + 1987 + + diff --git a/vendor/github.com/petar/GoLLRB/.gitignore b/vendor/github.com/petar/GoLLRB/.gitignore new file mode 100644 index 00000000000..e333b2dbf73 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/.gitignore @@ -0,0 +1,23 @@ +syntax:glob +*.[568ao] +*.ao +*.so +*.pyc +*.swp +*.swo +._* +.nfs.* +[568a].out +*~ +*.orig +*.pb.go +core +_obj +_test +src/pkg/Make.deps +_testmain.go + +syntax:regexp +^pkg/ +^src/cmd/(.*)/6?\1$ +^.*/core.[0-9]*$ diff --git a/vendor/github.com/petar/GoLLRB/AUTHORS b/vendor/github.com/petar/GoLLRB/AUTHORS new file mode 100644 index 00000000000..78d1de49566 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/AUTHORS @@ -0,0 +1,4 @@ +Petar Maymounkov +Vadim Vygonets +Ian Smith +Martin Bruse diff --git a/vendor/github.com/petar/GoLLRB/LICENSE b/vendor/github.com/petar/GoLLRB/LICENSE new file mode 100644 index 00000000000..b75312c787d --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2010, Petar Maymounkov +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +(*) Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +(*) Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +(*) Neither the name of Petar Maymounkov nor the names of its contributors may be +used to endorse or promote products derived from this software without specific +prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/petar/GoLLRB/README.md b/vendor/github.com/petar/GoLLRB/README.md new file mode 100644 index 00000000000..742ca0bd560 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/README.md @@ -0,0 +1,66 @@ +# GoLLRB + +GoLLRB is a Left-Leaning Red-Black (LLRB) implementation of 2-3 balanced binary +search trees in Go Language. + +## Overview + +As of this writing and to the best of the author's knowledge, +Go still does not have a balanced binary search tree (BBST) data structure. +These data structures are quite useful in a variety of cases. A BBST maintains +elements in sorted order under dynamic updates (inserts and deletes) and can +support various order-specific queries. Furthermore, in practice one often +implements other common data structures like Priority Queues, using BBST's. + +2-3 trees (a type of BBST's), as well as the runtime-similar 2-3-4 trees, are +the de facto standard BBST algoritms found in implementations of Python, Java, +and other libraries. The LLRB method of implementing 2-3 trees is a recent +improvement over the traditional implementation. 
The LLRB approach was +discovered relatively recently (in 2008) by Robert Sedgewick of Princeton +University. + +GoLLRB is a Go implementation of LLRB 2-3 trees. + +## Maturity + +GoLLRB has been used in some pretty heavy-weight machine learning tasks over many gigabytes of data. +I consider it to be in stable, perhaps even production, shape. There are no known bugs. + +## Installation + +With a healthy Go Language installed, simply run `go get github.com/petar/GoLLRB/llrb` + +## Example + + package main + + import ( + "fmt" + "github.com/petar/GoLLRB/llrb" + ) + + func lessInt(a, b interface{}) bool { return a.(int) < b.(int) } + + func main() { + tree := llrb.New(lessInt) + tree.ReplaceOrInsert(1) + tree.ReplaceOrInsert(2) + tree.ReplaceOrInsert(3) + tree.ReplaceOrInsert(4) + tree.DeleteMin() + tree.Delete(4) + c := tree.IterAscend() + for { + u := <-c + if u == nil { + break + } + fmt.Printf("%d\n", int(u.(int))) + } + } + +## About + +GoLLRB was written by [Petar Maymounkov](http://pdos.csail.mit.edu/~petar/). + +Follow me on [Twitter @maymounkov](http://www.twitter.com/maymounkov)! 
diff --git a/vendor/github.com/petar/GoLLRB/example/ex1.go b/vendor/github.com/petar/GoLLRB/example/ex1.go new file mode 100644 index 00000000000..6ebe4a68627 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/example/ex1.go @@ -0,0 +1,26 @@ +package main + +import ( + "fmt" + "github.com/petar/GoLLRB/llrb" +) + +func lessInt(a, b interface{}) bool { return a.(int) < b.(int) } + +func main() { + tree := llrb.New(lessInt) + tree.ReplaceOrInsert(1) + tree.ReplaceOrInsert(2) + tree.ReplaceOrInsert(3) + tree.ReplaceOrInsert(4) + tree.DeleteMin() + tree.Delete(4) + c := tree.IterAscend() + for { + u := <-c + if u == nil { + break + } + fmt.Printf("%d\n", int(u.(int))) + } +} diff --git a/vendor/github.com/petar/GoLLRB/llrb/avgvar.go b/vendor/github.com/petar/GoLLRB/llrb/avgvar.go new file mode 100644 index 00000000000..2d7e2a3262d --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/avgvar.go @@ -0,0 +1,39 @@ +// Copyright 2010 Petar Maymounkov. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package llrb + +import "math" + +// avgVar maintains the average and variance of a stream of numbers +// in a space-efficient manner. 
+type avgVar struct { + count int64 + sum, sumsq float64 +} + +func (av *avgVar) Init() { + av.count = 0 + av.sum = 0.0 + av.sumsq = 0.0 +} + +func (av *avgVar) Add(sample float64) { + av.count++ + av.sum += sample + av.sumsq += sample * sample +} + +func (av *avgVar) GetCount() int64 { return av.count } + +func (av *avgVar) GetAvg() float64 { return av.sum / float64(av.count) } + +func (av *avgVar) GetTotal() float64 { return av.sum } + +func (av *avgVar) GetVar() float64 { + a := av.GetAvg() + return av.sumsq/float64(av.count) - a*a +} + +func (av *avgVar) GetStdDev() float64 { return math.Sqrt(av.GetVar()) } diff --git a/vendor/github.com/petar/GoLLRB/llrb/iterator.go b/vendor/github.com/petar/GoLLRB/llrb/iterator.go new file mode 100644 index 00000000000..ee7b27f442b --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/iterator.go @@ -0,0 +1,93 @@ +package llrb + +type ItemIterator func(i Item) bool + +//func (t *Tree) Ascend(iterator ItemIterator) { +// t.AscendGreaterOrEqual(Inf(-1), iterator) +//} + +func (t *LLRB) AscendRange(greaterOrEqual, lessThan Item, iterator ItemIterator) { + t.ascendRange(t.root, greaterOrEqual, lessThan, iterator) +} + +func (t *LLRB) ascendRange(h *Node, inf, sup Item, iterator ItemIterator) bool { + if h == nil { + return true + } + if !less(h.Item, sup) { + return t.ascendRange(h.Left, inf, sup, iterator) + } + if less(h.Item, inf) { + return t.ascendRange(h.Right, inf, sup, iterator) + } + + if !t.ascendRange(h.Left, inf, sup, iterator) { + return false + } + if !iterator(h.Item) { + return false + } + return t.ascendRange(h.Right, inf, sup, iterator) +} + +// AscendGreaterOrEqual will call iterator once for each element greater or equal to +// pivot in ascending order. It will stop whenever the iterator returns false. 
+func (t *LLRB) AscendGreaterOrEqual(pivot Item, iterator ItemIterator) { + t.ascendGreaterOrEqual(t.root, pivot, iterator) +} + +func (t *LLRB) ascendGreaterOrEqual(h *Node, pivot Item, iterator ItemIterator) bool { + if h == nil { + return true + } + if !less(h.Item, pivot) { + if !t.ascendGreaterOrEqual(h.Left, pivot, iterator) { + return false + } + if !iterator(h.Item) { + return false + } + } + return t.ascendGreaterOrEqual(h.Right, pivot, iterator) +} + +func (t *LLRB) AscendLessThan(pivot Item, iterator ItemIterator) { + t.ascendLessThan(t.root, pivot, iterator) +} + +func (t *LLRB) ascendLessThan(h *Node, pivot Item, iterator ItemIterator) bool { + if h == nil { + return true + } + if !t.ascendLessThan(h.Left, pivot, iterator) { + return false + } + if !iterator(h.Item) { + return false + } + if less(h.Item, pivot) { + return t.ascendLessThan(h.Left, pivot, iterator) + } + return true +} + +// DescendLessOrEqual will call iterator once for each element less than the +// pivot in descending order. It will stop whenever the iterator returns false. 
+func (t *LLRB) DescendLessOrEqual(pivot Item, iterator ItemIterator) { + t.descendLessOrEqual(t.root, pivot, iterator) +} + +func (t *LLRB) descendLessOrEqual(h *Node, pivot Item, iterator ItemIterator) bool { + if h == nil { + return true + } + if less(h.Item, pivot) || !less(pivot, h.Item) { + if !t.descendLessOrEqual(h.Right, pivot, iterator) { + return false + } + if !iterator(h.Item) { + return false + } + } + return t.descendLessOrEqual(h.Left, pivot, iterator) +} diff --git a/vendor/github.com/petar/GoLLRB/llrb/iterator_test.go b/vendor/github.com/petar/GoLLRB/llrb/iterator_test.go new file mode 100644 index 00000000000..db5e12c92eb --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/iterator_test.go @@ -0,0 +1,76 @@ +package llrb + +import ( + "reflect" + "testing" +) + +func TestAscendGreaterOrEqual(t *testing.T) { + tree := New() + tree.InsertNoReplace(Int(4)) + tree.InsertNoReplace(Int(6)) + tree.InsertNoReplace(Int(1)) + tree.InsertNoReplace(Int(3)) + var ary []Item + tree.AscendGreaterOrEqual(Int(-1), func(i Item) bool { + ary = append(ary, i) + return true + }) + expected := []Item{Int(1), Int(3), Int(4), Int(6)} + if !reflect.DeepEqual(ary, expected) { + t.Errorf("expected %v but got %v", expected, ary) + } + ary = nil + tree.AscendGreaterOrEqual(Int(3), func(i Item) bool { + ary = append(ary, i) + return true + }) + expected = []Item{Int(3), Int(4), Int(6)} + if !reflect.DeepEqual(ary, expected) { + t.Errorf("expected %v but got %v", expected, ary) + } + ary = nil + tree.AscendGreaterOrEqual(Int(2), func(i Item) bool { + ary = append(ary, i) + return true + }) + expected = []Item{Int(3), Int(4), Int(6)} + if !reflect.DeepEqual(ary, expected) { + t.Errorf("expected %v but got %v", expected, ary) + } +} + +func TestDescendLessOrEqual(t *testing.T) { + tree := New() + tree.InsertNoReplace(Int(4)) + tree.InsertNoReplace(Int(6)) + tree.InsertNoReplace(Int(1)) + tree.InsertNoReplace(Int(3)) + var ary []Item + 
tree.DescendLessOrEqual(Int(10), func(i Item) bool { + ary = append(ary, i) + return true + }) + expected := []Item{Int(6), Int(4), Int(3), Int(1)} + if !reflect.DeepEqual(ary, expected) { + t.Errorf("expected %v but got %v", expected, ary) + } + ary = nil + tree.DescendLessOrEqual(Int(4), func(i Item) bool { + ary = append(ary, i) + return true + }) + expected = []Item{Int(4), Int(3), Int(1)} + if !reflect.DeepEqual(ary, expected) { + t.Errorf("expected %v but got %v", expected, ary) + } + ary = nil + tree.DescendLessOrEqual(Int(5), func(i Item) bool { + ary = append(ary, i) + return true + }) + expected = []Item{Int(4), Int(3), Int(1)} + if !reflect.DeepEqual(ary, expected) { + t.Errorf("expected %v but got %v", expected, ary) + } +} diff --git a/vendor/github.com/petar/GoLLRB/llrb/llrb-stats.go b/vendor/github.com/petar/GoLLRB/llrb/llrb-stats.go new file mode 100644 index 00000000000..47126a3be96 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/llrb-stats.go @@ -0,0 +1,46 @@ +// Copyright 2010 Petar Maymounkov. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package llrb + +// GetHeight() returns an item in the tree with key @key, and it's height in the tree +func (t *LLRB) GetHeight(key Item) (result Item, depth int) { + return t.getHeight(t.root, key) +} + +func (t *LLRB) getHeight(h *Node, item Item) (Item, int) { + if h == nil { + return nil, 0 + } + if less(item, h.Item) { + result, depth := t.getHeight(h.Left, item) + return result, depth + 1 + } + if less(h.Item, item) { + result, depth := t.getHeight(h.Right, item) + return result, depth + 1 + } + return h.Item, 0 +} + +// HeightStats() returns the average and standard deviation of the height +// of elements in the tree +func (t *LLRB) HeightStats() (avg, stddev float64) { + av := &avgVar{} + heightStats(t.root, 0, av) + return av.GetAvg(), av.GetStdDev() +} + +func heightStats(h *Node, d int, av *avgVar) { + if h == nil { + return + } + av.Add(float64(d)) + if h.Left != nil { + heightStats(h.Left, d+1, av) + } + if h.Right != nil { + heightStats(h.Right, d+1, av) + } +} diff --git a/vendor/github.com/petar/GoLLRB/llrb/llrb.go b/vendor/github.com/petar/GoLLRB/llrb/llrb.go new file mode 100644 index 00000000000..81373fbfdf0 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/llrb.go @@ -0,0 +1,456 @@ +// Copyright 2010 Petar Maymounkov. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// A Left-Leaning Red-Black (LLRB) implementation of 2-3 balanced binary search trees, +// based on the following work: +// +// http://www.cs.princeton.edu/~rs/talks/LLRB/08Penn.pdf +// http://www.cs.princeton.edu/~rs/talks/LLRB/LLRB.pdf +// http://www.cs.princeton.edu/~rs/talks/LLRB/Java/RedBlackBST.java +// +// 2-3 trees (and the run-time equivalent 2-3-4 trees) are the de facto standard BST +// algoritms found in implementations of Python, Java, and other libraries. 
The LLRB +// implementation of 2-3 trees is a recent improvement on the traditional implementation, +// observed and documented by Robert Sedgewick. +// +package llrb + +// Tree is a Left-Leaning Red-Black (LLRB) implementation of 2-3 trees +type LLRB struct { + count int + root *Node +} + +type Node struct { + Item + Left, Right *Node // Pointers to left and right child nodes + Black bool // If set, the color of the link (incoming from the parent) is black + // In the LLRB, new nodes are always red, hence the zero-value for node +} + +type Item interface { + Less(than Item) bool +} + +// +func less(x, y Item) bool { + if x == pinf { + return false + } + if x == ninf { + return true + } + return x.Less(y) +} + +// Inf returns an Item that is "bigger than" any other item, if sign is positive. +// Otherwise it returns an Item that is "smaller than" any other item. +func Inf(sign int) Item { + if sign == 0 { + panic("sign") + } + if sign > 0 { + return pinf + } + return ninf +} + +var ( + ninf = nInf{} + pinf = pInf{} +) + +type nInf struct{} + +func (nInf) Less(Item) bool { + return true +} + +type pInf struct{} + +func (pInf) Less(Item) bool { + return false +} + +// New() allocates a new tree +func New() *LLRB { + return &LLRB{} +} + +// SetRoot sets the root node of the tree. +// It is intended to be used by functions that deserialize the tree. +func (t *LLRB) SetRoot(r *Node) { + t.root = r +} + +// Root returns the root node of the tree. +// It is intended to be used by functions that serialize the tree. +func (t *LLRB) Root() *Node { + return t.root +} + +// Len returns the number of nodes in the tree. +func (t *LLRB) Len() int { return t.count } + +// Has returns true if the tree contains an element whose order is the same as that of key. +func (t *LLRB) Has(key Item) bool { + return t.Get(key) != nil +} + +// Get retrieves an element from the tree whose order is the same as that of key. 
+func (t *LLRB) Get(key Item) Item { + h := t.root + for h != nil { + switch { + case less(key, h.Item): + h = h.Left + case less(h.Item, key): + h = h.Right + default: + return h.Item + } + } + return nil +} + +// Min returns the minimum element in the tree. +func (t *LLRB) Min() Item { + h := t.root + if h == nil { + return nil + } + for h.Left != nil { + h = h.Left + } + return h.Item +} + +// Max returns the maximum element in the tree. +func (t *LLRB) Max() Item { + h := t.root + if h == nil { + return nil + } + for h.Right != nil { + h = h.Right + } + return h.Item +} + +func (t *LLRB) ReplaceOrInsertBulk(items ...Item) { + for _, i := range items { + t.ReplaceOrInsert(i) + } +} + +func (t *LLRB) InsertNoReplaceBulk(items ...Item) { + for _, i := range items { + t.InsertNoReplace(i) + } +} + +// ReplaceOrInsert inserts item into the tree. If an existing +// element has the same order, it is removed from the tree and returned. +func (t *LLRB) ReplaceOrInsert(item Item) Item { + if item == nil { + panic("inserting nil item") + } + var replaced Item + t.root, replaced = t.replaceOrInsert(t.root, item) + t.root.Black = true + if replaced == nil { + t.count++ + } + return replaced +} + +func (t *LLRB) replaceOrInsert(h *Node, item Item) (*Node, Item) { + if h == nil { + return newNode(item), nil + } + + h = walkDownRot23(h) + + var replaced Item + if less(item, h.Item) { // BUG + h.Left, replaced = t.replaceOrInsert(h.Left, item) + } else if less(h.Item, item) { + h.Right, replaced = t.replaceOrInsert(h.Right, item) + } else { + replaced, h.Item = h.Item, item + } + + h = walkUpRot23(h) + + return h, replaced +} + +// InsertNoReplace inserts item into the tree. If an existing +// element has the same order, both elements remain in the tree. 
+func (t *LLRB) InsertNoReplace(item Item) { + if item == nil { + panic("inserting nil item") + } + t.root = t.insertNoReplace(t.root, item) + t.root.Black = true + t.count++ +} + +func (t *LLRB) insertNoReplace(h *Node, item Item) *Node { + if h == nil { + return newNode(item) + } + + h = walkDownRot23(h) + + if less(item, h.Item) { + h.Left = t.insertNoReplace(h.Left, item) + } else { + h.Right = t.insertNoReplace(h.Right, item) + } + + return walkUpRot23(h) +} + +// Rotation driver routines for 2-3 algorithm + +func walkDownRot23(h *Node) *Node { return h } + +func walkUpRot23(h *Node) *Node { + if isRed(h.Right) && !isRed(h.Left) { + h = rotateLeft(h) + } + + if isRed(h.Left) && isRed(h.Left.Left) { + h = rotateRight(h) + } + + if isRed(h.Left) && isRed(h.Right) { + flip(h) + } + + return h +} + +// Rotation driver routines for 2-3-4 algorithm + +func walkDownRot234(h *Node) *Node { + if isRed(h.Left) && isRed(h.Right) { + flip(h) + } + + return h +} + +func walkUpRot234(h *Node) *Node { + if isRed(h.Right) && !isRed(h.Left) { + h = rotateLeft(h) + } + + if isRed(h.Left) && isRed(h.Left.Left) { + h = rotateRight(h) + } + + return h +} + +// DeleteMin deletes the minimum element in the tree and returns the +// deleted item or nil otherwise. 
+func (t *LLRB) DeleteMin() Item { + var deleted Item + t.root, deleted = deleteMin(t.root) + if t.root != nil { + t.root.Black = true + } + if deleted != nil { + t.count-- + } + return deleted +} + +// deleteMin code for LLRB 2-3 trees +func deleteMin(h *Node) (*Node, Item) { + if h == nil { + return nil, nil + } + if h.Left == nil { + return nil, h.Item + } + + if !isRed(h.Left) && !isRed(h.Left.Left) { + h = moveRedLeft(h) + } + + var deleted Item + h.Left, deleted = deleteMin(h.Left) + + return fixUp(h), deleted +} + +// DeleteMax deletes the maximum element in the tree and returns +// the deleted item or nil otherwise +func (t *LLRB) DeleteMax() Item { + var deleted Item + t.root, deleted = deleteMax(t.root) + if t.root != nil { + t.root.Black = true + } + if deleted != nil { + t.count-- + } + return deleted +} + +func deleteMax(h *Node) (*Node, Item) { + if h == nil { + return nil, nil + } + if isRed(h.Left) { + h = rotateRight(h) + } + if h.Right == nil { + return nil, h.Item + } + if !isRed(h.Right) && !isRed(h.Right.Left) { + h = moveRedRight(h) + } + var deleted Item + h.Right, deleted = deleteMax(h.Right) + + return fixUp(h), deleted +} + +// Delete deletes an item from the tree whose key equals key. +// The deleted item is return, otherwise nil is returned. +func (t *LLRB) Delete(key Item) Item { + var deleted Item + t.root, deleted = t.delete(t.root, key) + if t.root != nil { + t.root.Black = true + } + if deleted != nil { + t.count-- + } + return deleted +} + +func (t *LLRB) delete(h *Node, item Item) (*Node, Item) { + var deleted Item + if h == nil { + return nil, nil + } + if less(item, h.Item) { + if h.Left == nil { // item not present. 
Nothing to delete + return h, nil + } + if !isRed(h.Left) && !isRed(h.Left.Left) { + h = moveRedLeft(h) + } + h.Left, deleted = t.delete(h.Left, item) + } else { + if isRed(h.Left) { + h = rotateRight(h) + } + // If @item equals @h.Item and no right children at @h + if !less(h.Item, item) && h.Right == nil { + return nil, h.Item + } + // PETAR: Added 'h.Right != nil' below + if h.Right != nil && !isRed(h.Right) && !isRed(h.Right.Left) { + h = moveRedRight(h) + } + // If @item equals @h.Item, and (from above) 'h.Right != nil' + if !less(h.Item, item) { + var subDeleted Item + h.Right, subDeleted = deleteMin(h.Right) + if subDeleted == nil { + panic("logic") + } + deleted, h.Item = h.Item, subDeleted + } else { // Else, @item is bigger than @h.Item + h.Right, deleted = t.delete(h.Right, item) + } + } + + return fixUp(h), deleted +} + +// Internal node manipulation routines + +func newNode(item Item) *Node { return &Node{Item: item} } + +func isRed(h *Node) bool { + if h == nil { + return false + } + return !h.Black +} + +func rotateLeft(h *Node) *Node { + x := h.Right + if x.Black { + panic("rotating a black link") + } + h.Right = x.Left + x.Left = h + x.Black = h.Black + h.Black = false + return x +} + +func rotateRight(h *Node) *Node { + x := h.Left + if x.Black { + panic("rotating a black link") + } + h.Left = x.Right + x.Right = h + x.Black = h.Black + h.Black = false + return x +} + +// REQUIRE: Left and Right children must be present +func flip(h *Node) { + h.Black = !h.Black + h.Left.Black = !h.Left.Black + h.Right.Black = !h.Right.Black +} + +// REQUIRE: Left and Right children must be present +func moveRedLeft(h *Node) *Node { + flip(h) + if isRed(h.Right.Left) { + h.Right = rotateRight(h.Right) + h = rotateLeft(h) + flip(h) + } + return h +} + +// REQUIRE: Left and Right children must be present +func moveRedRight(h *Node) *Node { + flip(h) + if isRed(h.Left.Left) { + h = rotateRight(h) + flip(h) + } + return h +} + +func fixUp(h *Node) *Node { + if 
isRed(h.Right) { + h = rotateLeft(h) + } + + if isRed(h.Left) && isRed(h.Left.Left) { + h = rotateRight(h) + } + + if isRed(h.Left) && isRed(h.Right) { + flip(h) + } + + return h +} diff --git a/vendor/github.com/petar/GoLLRB/llrb/llrb_test.go b/vendor/github.com/petar/GoLLRB/llrb/llrb_test.go new file mode 100644 index 00000000000..b7bc9780070 --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/llrb_test.go @@ -0,0 +1,239 @@ +// Copyright 2010 Petar Maymounkov. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package llrb + +import ( + "math" + "math/rand" + "testing" +) + +func TestCases(t *testing.T) { + tree := New() + tree.ReplaceOrInsert(Int(1)) + tree.ReplaceOrInsert(Int(1)) + if tree.Len() != 1 { + t.Errorf("expecting len 1") + } + if !tree.Has(Int(1)) { + t.Errorf("expecting to find key=1") + } + + tree.Delete(Int(1)) + if tree.Len() != 0 { + t.Errorf("expecting len 0") + } + if tree.Has(Int(1)) { + t.Errorf("not expecting to find key=1") + } + + tree.Delete(Int(1)) + if tree.Len() != 0 { + t.Errorf("expecting len 0") + } + if tree.Has(Int(1)) { + t.Errorf("not expecting to find key=1") + } +} + +func TestReverseInsertOrder(t *testing.T) { + tree := New() + n := 100 + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(n - i)) + } + i := 0 + tree.AscendGreaterOrEqual(Int(0), func(item Item) bool { + i++ + if item.(Int) != Int(i) { + t.Errorf("bad order: got %d, expect %d", item.(Int), i) + } + return true + }) +} + +func TestRange(t *testing.T) { + tree := New() + order := []String{ + "ab", "aba", "abc", "a", "aa", "aaa", "b", "a-", "a!", + } + for _, i := range order { + tree.ReplaceOrInsert(i) + } + k := 0 + tree.AscendRange(String("ab"), String("ac"), func(item Item) bool { + if k > 3 { + t.Fatalf("returned more items than expected") + } + i1 := order[k] + i2 := item.(String) + if i1 != i2 { + t.Errorf("expecting %s, got %s", i1, i2) + } + k++ + 
return true + }) +} + +func TestRandomInsertOrder(t *testing.T) { + tree := New() + n := 1000 + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(perm[i])) + } + j := 0 + tree.AscendGreaterOrEqual(Int(0), func(item Item) bool { + if item.(Int) != Int(j) { + t.Fatalf("bad order") + } + j++ + return true + }) +} + +func TestRandomReplace(t *testing.T) { + tree := New() + n := 100 + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(perm[i])) + } + perm = rand.Perm(n) + for i := 0; i < n; i++ { + if replaced := tree.ReplaceOrInsert(Int(perm[i])); replaced == nil || replaced.(Int) != Int(perm[i]) { + t.Errorf("error replacing") + } + } +} + +func TestRandomInsertSequentialDelete(t *testing.T) { + tree := New() + n := 1000 + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(perm[i])) + } + for i := 0; i < n; i++ { + tree.Delete(Int(i)) + } +} + +func TestRandomInsertDeleteNonExistent(t *testing.T) { + tree := New() + n := 100 + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(perm[i])) + } + if tree.Delete(Int(200)) != nil { + t.Errorf("deleted non-existent item") + } + if tree.Delete(Int(-2)) != nil { + t.Errorf("deleted non-existent item") + } + for i := 0; i < n; i++ { + if u := tree.Delete(Int(i)); u == nil || u.(Int) != Int(i) { + t.Errorf("delete failed") + } + } + if tree.Delete(Int(200)) != nil { + t.Errorf("deleted non-existent item") + } + if tree.Delete(Int(-2)) != nil { + t.Errorf("deleted non-existent item") + } +} + +func TestRandomInsertPartialDeleteOrder(t *testing.T) { + tree := New() + n := 100 + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(perm[i])) + } + for i := 1; i < n-1; i++ { + tree.Delete(Int(i)) + } + j := 0 + tree.AscendGreaterOrEqual(Int(0), func(item Item) bool { + switch j { + case 0: + if item.(Int) != Int(0) { + t.Errorf("expecting 0") + } + case 1: + if item.(Int) != Int(n-1) { + t.Errorf("expecting %d", n-1) 
+ } + } + j++ + return true + }) +} + +func TestRandomInsertStats(t *testing.T) { + tree := New() + n := 100000 + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.ReplaceOrInsert(Int(perm[i])) + } + avg, _ := tree.HeightStats() + expAvg := math.Log2(float64(n)) - 1.5 + if math.Abs(avg-expAvg) >= 2.0 { + t.Errorf("too much deviation from expected average height") + } +} + +func BenchmarkInsert(b *testing.B) { + tree := New() + for i := 0; i < b.N; i++ { + tree.ReplaceOrInsert(Int(b.N - i)) + } +} + +func BenchmarkDelete(b *testing.B) { + b.StopTimer() + tree := New() + for i := 0; i < b.N; i++ { + tree.ReplaceOrInsert(Int(b.N - i)) + } + b.StartTimer() + for i := 0; i < b.N; i++ { + tree.Delete(Int(i)) + } +} + +func BenchmarkDeleteMin(b *testing.B) { + b.StopTimer() + tree := New() + for i := 0; i < b.N; i++ { + tree.ReplaceOrInsert(Int(b.N - i)) + } + b.StartTimer() + for i := 0; i < b.N; i++ { + tree.DeleteMin() + } +} + +func TestInsertNoReplace(t *testing.T) { + tree := New() + n := 1000 + for q := 0; q < 2; q++ { + perm := rand.Perm(n) + for i := 0; i < n; i++ { + tree.InsertNoReplace(Int(perm[i])) + } + } + j := 0 + tree.AscendGreaterOrEqual(Int(0), func(item Item) bool { + if item.(Int) != Int(j/2) { + t.Fatalf("bad order") + } + j++ + return true + }) +} diff --git a/vendor/github.com/petar/GoLLRB/llrb/util.go b/vendor/github.com/petar/GoLLRB/llrb/util.go new file mode 100644 index 00000000000..63dbdb2df0a --- /dev/null +++ b/vendor/github.com/petar/GoLLRB/llrb/util.go @@ -0,0 +1,17 @@ +// Copyright 2010 Petar Maymounkov. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package llrb + +type Int int + +func (x Int) Less(than Item) bool { + return x < than.(Int) +} + +type String string + +func (x String) Less(than Item) bool { + return x < than.(String) +}