From 4487e56bae466a27dcdf77b796b8c4a930a2d480 Mon Sep 17 00:00:00 2001
From: Jimmi Dyson
Date: Thu, 2 Jul 2020 16:43:22 +0100
Subject: [PATCH 01/14] chore: Use golang 1.14.4

---
 .travis.yml | 2 +-
 Makefile    | 6 ++----
 2 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index b473f48c1f..d0200e065f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,7 @@ language: go
 # The version used here should match the BUILD_IMAGE variable in the
 # Makefile.
 go:
-  - "1.13.7"
+  - "1.14.4"
 # Enable building in Travis using forked repos.
 go_import_path: sigs.k8s.io/kubefed
diff --git a/Makefile b/Makefile
index 4f09e0ad07..312e99b0b7 100644
--- a/Makefile
+++ b/Makefile
@@ -43,7 +43,7 @@ endif
 BUILDMNT = /go/src/$(GOTARGET)
 # The version here should match the version of go configured in
 # .travis.yml
-BUILD_IMAGE ?= golang:1.13.7
+BUILD_IMAGE ?= golang:1.14.4
 HYPERFED_TARGET = bin/hyperfed
 CONTROLLER_TARGET = bin/controller-manager
@@ -56,6 +56,7 @@ LDFLAG_OPTIONS = -ldflags "-X sigs.k8s.io/kubefed/pkg/version.version=$(GIT_VERS
 	-X sigs.k8s.io/kubefed/pkg/version.gitTreeState=$(GIT_TREESTATE) \
 	-X sigs.k8s.io/kubefed/pkg/version.buildDate=$(BUILDDATE)"

+export GOPATH ?= $(shell go env GOPATH)
 GO_BUILDCMD = CGO_ENABLED=0 go build $(VERBOSE_FLAG) $(LDFLAG_OPTIONS)
 TESTARGS ?= $(VERBOSE_FLAG) -timeout 60s
@@ -128,9 +129,6 @@ e2e: $(E2E_BINARY_TARGET)
 # Generate code
 generate-code: controller-gen
-ifndef GOPATH
-	$(error GOPATH not defined, please define GOPATH. Run "go help gopath" to learn more about GOPATH)
-endif
 	controller-gen object:headerFile=./hack/boilerplate.go.txt paths="./..."
 generate: generate-code kubefedctl

From c415d4fd32f9ca88def06896ea4d2f5655cc4205 Mon Sep 17 00:00:00 2001
From: Jimmi Dyson
Date: Thu, 2 Jul 2020 16:45:44 +0100
Subject: [PATCH 02/14] chore: Add interactive and tty flag to docker run to be able to interrupt build

---
 Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 312e99b0b7..51963bdbce 100644
--- a/Makefile
+++ b/Makefile
@@ -64,7 +64,7 @@ TEST_PKGS ?= $(GOTARGET)/cmd/... $(GOTARGET)/pkg/...
 TEST_CMD = go test $(TESTARGS)
 TEST = $(TEST_CMD) $(TEST_PKGS)
-DOCKER_BUILD ?= $(DOCKER) run --rm -v $(DIR):$(BUILDMNT) -w $(BUILDMNT) $(BUILD_IMAGE) /bin/sh -c
+DOCKER_BUILD ?= $(DOCKER) run -it --rm -v $(DIR):$(BUILDMNT) -w $(BUILDMNT) $(BUILD_IMAGE) /bin/sh -c
 # TODO (irfanurrehman): can add local compile, and auto-generate targets also if needed
 .PHONY: all container push clean hyperfed controller kubefedctl test local-test vet fmt build bindir generate webhook e2e

From 864d5ce8ff1b35329d001dadc49734e0a452bbf6 Mon Sep 17 00:00:00 2001
From: Jimmi Dyson
Date: Thu, 2 Jul 2020 16:50:17 +0100
Subject: [PATCH 03/14] feat: Upgrade dependencies to kubernetes 1.18.5 release

---
 go.mod | 20 +-
 go.sum | 199 +++++++++++-------
 .../kubefedcluster/clusterclient.go | 5 +-
 .../controller_integration_test.go | 15 +-
 pkg/controller/status/controller.go | 5 +-
 pkg/controller/util/genericinformer.go | 5 +-
 pkg/controller/util/resourceinformer.go | 6 +-
 pkg/kubefedctl/disable.go | 4 +-
 pkg/kubefedctl/enable/enable.go | 8 +-
 pkg/kubefedctl/enable/schema.go | 3 +-
 pkg/kubefedctl/federate/federate.go | 4 +-
 pkg/kubefedctl/federate/util.go | 3 +-
 pkg/kubefedctl/join.go | 79 ++++---
 pkg/kubefedctl/orphaning/disable.go | 3 +-
 pkg/kubefedctl/orphaning/enable.go | 3 +-
 pkg/kubefedctl/orphaning/orphaning.go | 3 +-
 pkg/kubefedctl/unjoin.go | 24 ++-
 pkg/schedulingtypes/plugin.go | 5 +-
 scripts/pre-commit.sh | 2 +-
 test/common/crudtester.go | 24 +--
 test/common/dns.go | 3 +-
 test/common/resource_helper.go | 3 +-
 test/e2e/crd.go | 7 +-
 test/e2e/federate.go | 8 +-
 test/e2e/framework/framework.go | 2 +-
 test/e2e/framework/unmanaged.go | 8 +-
 test/e2e/ftccontroller.go | 4 +-
 test/e2e/ingressdns.go | 4 +-
 test/e2e/placement.go | 2 +-
 test/e2e/scheduling.go | 6 +-
 test/e2e/servicedns.go | 6 +-
 test/e2e/version.go | 12 +-
 32 files changed, 292 insertions(+), 193 deletions(-)

diff --git a/go.mod b/go.mod
index bb176e4580..1fd8ee95be 100644
--- a/go.mod
+++ b/go.mod
@@ -15,15 +15,15 @@ require (
 	github.com/spf13/cobra v0.0.5
 	github.com/spf13/pflag v1.0.5
 	github.com/stretchr/testify v1.4.0
-	k8s.io/api v0.17.3
-	k8s.io/apiextensions-apiserver v0.17.3
-	k8s.io/apimachinery v0.17.3
-	k8s.io/apiserver v0.17.3
-	k8s.io/client-go v0.17.3
-	k8s.io/component-base v0.17.3
+	k8s.io/api v0.18.5
+	k8s.io/apiextensions-apiserver v0.18.5
+	k8s.io/apimachinery v0.18.5
+	k8s.io/apiserver v0.18.5
+	k8s.io/client-go v0.18.5
+	k8s.io/component-base v0.18.5
 	k8s.io/klog v1.0.0
-	k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a
-	k8s.io/kubectl v0.17.3
-	sigs.k8s.io/controller-runtime v0.5.0
-	sigs.k8s.io/yaml v1.1.0
+	k8s.io/kube-openapi v0.0.0-20200410145947-61e04a5be9a6
+	k8s.io/kubectl v0.18.5
+	sigs.k8s.io/controller-runtime v0.6.0
+	sigs.k8s.io/yaml v1.2.0
 )
diff --git a/go.sum b/go.sum
index 99737dcbd3..822c2b4faf 100644
--- a/go.sum
+++ b/go.sum
@@ -10,13 +10,14 @@ github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEg
 github.com/Azure/go-autorest/autorest/date v0.1.0 h1:YGrhWfrgtFs84+h0o46rJrlmsZtyZRg470CqAXTZaGM=
 github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
 github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
+github.com/Azure/go-autorest/autorest/mocks v0.2.0 h1:Ww5g4zThfD/6cLb4z6xxgeyDa7QDkizMkJKe0ysZXp0=
 github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
 github.com/Azure/go-autorest/logger v0.1.0
h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY= github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/MakeNowJust/heredoc v0.0.0-20170808103936-bb23615498cd/go.mod h1:64YHyfSL2R96J44Nlwm39UHepQbyR5q10x7iYa1ks2E= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46 h1:lsxEuwrXEAokXB9qhlbKWPpo3KMLZQ5WB5WLQRW1uq0= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= @@ -40,13 +41,16 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/blang/semver v3.5.0+incompatible h1:CGxCgetQ64DKk7rdZ++Vfnb1+ogGNnB17OJKJXD2Cfs= github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/chai2010/gettext-go v0.0.0-20160711120539-c6fed771bfd5/go.mod h1:/iP1qXHoty45bqomnu2LM+VVyAEdWN+vtSHGlQgyxbw= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa h1:OaNxuTZr7kxeODyLWsRMC+OD03aFUH+mW6r2d+MWa5Y= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8= @@ -56,7 +60,6 @@ github.com/coreos/pkg v0.0.0-20180108230652-97fdf19511ea h1:n2Ltr3SrfQlf/9nOna1D github.com/coreos/pkg v0.0.0-20180108230652-97fdf19511ea/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -70,11 +73,14 @@ github.com/docker/go-units v0.4.0/go.mod 
h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= +github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v2.9.5+incompatible h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk= github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.5.0+incompatible h1:ouOWdg56aJriqS0huScTkVXPC5IcNrDCXZ6OoTAWu7M= github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= @@ -82,6 +88,8 @@ github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZM github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -143,12 +151,13 @@ github.com/go-openapi/validate v0.19.5/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85n github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d h1:3PaI8p3seN09VjbTYC/QWlUZdZ1qS1zGjy7LH2Wt07I= -github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7 h1:u4bArs140e9+AfE52mFHOXVFnOSBJBRlzTHrOPLOIhE= -github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef h1:veQD95Isof8w9/WXiA+pa3tz3fJXkt5B7QaRBrM62gk= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -156,17 +165,29 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golangplus/bytes v0.0.0-20160111154220-45c989fe5450/go.mod h1:Bk6SMAONeMXrxql8uvOKuAZSu8aM5RUGv+1C6IJaEho= github.com/golangplus/fmt v0.0.0-20150411045040-2a5d6d7d2995/go.mod h1:lJgMEyOkYFkPcDKwRXegd+iM6E7matEszMG5HhwytU8= github.com/golangplus/testing v0.0.0-20180327235837-af21d9c3145e/go.mod h1:0AA//k/eakGydO4jKRoRL2j92ZKSzTgj9tclaCrvXHk= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= +github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod 
h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -174,16 +195,20 @@ github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= +github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/googleapis/gnostic v0.3.1 h1:WeAefnSUHlBb0iJKwxFDZdbfGwkd7xRNuV+IpXMJhYk= github.com/googleapis/gnostic v0.3.1/go.mod h1:on+2t9HRStVgn95RSsFWFz+6Q0Snyqv1awfrALZdbtU= github.com/gophercloud/gophercloud v0.1.0 h1:P/nh25+rzXouhytV2pUHBb65fnds26Ghl8/391+sT5o= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4 h1:z53tR0945TRRQO/fLEVPI6SMv7ZflF0TEaTAoU7tOzg= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.5 h1:UImYN5qQ8tuGpGE16ZmjvcTtTw24zw1QAp/SlnNrZhI= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= @@ -194,9 +219,10 @@ github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpO github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.6 h1:xTNEAn+kxVO7dTZGu0CegyqKZmoWFI0rF8UxjlB2d28= github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -207,11 +233,14 @@ github.com/julienschmidt/httprouter 
v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc= @@ -234,7 +263,6 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= @@ -243,19 +271,21 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.0 h1:Iw5WCbBcaAAd0fpRb1c9r5YCylv4XDoCSigm1zLevwU= -github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= +github.com/onsi/ginkgo v1.12.1/go.mod 
h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.13.0 h1:M76yO2HkZASFjXL0HSoZJ1AYEmQxNJmY41Jx1zNUq1Y= +github.com/onsi/ginkgo v1.13.0/go.mod h1:+REjRxOmWfHCjfv9TTWB1jD1Frx4XydAD3zm1lskyM0= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= -github.com/onsi/gomega v1.9.0 h1:R1uwffexN6Pr340GtYRIdZmAiN4J+iw6WG4wog1DUXg= -github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/openshift/generic-admission-server v1.14.0 h1:GAQy5JNVcbmUuIpPvLd39+2rPecxEm7WQ2sP7ACrse4= github.com/openshift/generic-admission-server v1.14.0/go.mod h1:GD9KN/W4KxqRQGVMbqQHpHzb2XcQVvLCaBaSciqXvfM= @@ -267,7 +297,6 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= @@ -277,19 +306,25 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90 h1:S/YWwWx/RA8rT8tKFRuGUZhuA90OyIBpPCXkcbwU8DE= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1 h1:K0MGApIoQvMw27RTdJkPbr3JZ7DNbtxQNyi5STVM6Kw= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2 h1:6LJUbpNm42llc4HRCuvApCSWB/WfhuNo9K98Q9sNGfs= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/russross/blackfriday v1.5.2/go.mod 
h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/soheilhy/cmux v0.1.4 h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= @@ -305,19 +340,21 @@ github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DM github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8 h1:ndzgwNDnKIqyCvHTXaCqh9KlOWKvBry6nuXMJmonVsE= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xlab/handysort v0.0.0-20150421192137-fb3537ed64a1/go.mod h1:QcJo0QPSfTONNIgpN5RA8prR7fF8nkF6cTWTcNerRO8= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +go.etcd.io/bbolt v1.3.3 h1:MUGmc65QhB3pIlaQ5bB4LwqSj6GIonVJXpZiaKNyaKk= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738 h1:VcrIfasaLFkyjk6KNlXQSzO+B0fZcnECiDrKJsfxka0= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= @@ -327,6 +364,8 @@ go.mongodb.org/mongo-driver v1.1.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qL go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= 
go.uber.org/atomic v1.3.2 h1:2Oa65PReHzfn29GpvgsYwloV9AVFHPDk8tYxt2c2tr4= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0 h1:cxzIVoETapQEqDhQu3QfnvXAV4AlzcvUCxkVUFw3+EU= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/multierr v1.1.0 h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/zap v1.10.0 h1:ORx85nbTijNz8ljznvCMR1ZBIPKFn3jQrag10X2AsuM= @@ -338,17 +377,13 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586 h1:7KByu05hhLed2MO29w7p1XfZvZ13m8mub3shuVftRs0= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975 h1:/Tl7pH94bvbAAHBdZJT947M/+gp0+CqQXDtMRC0fseo= +golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190312203227-4b39c73a6495/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -367,6 +402,8 @@ golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191004110552-13f9640d40b9 h1:rjwSpXsdiK0dV8/Naq3kAw9ymfAeJIyd0upUIElB+lI= golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7 h1:AeiKBIuRw3UomYXSbLy0Mc2dDLfdtbT/IVn4keq83P0= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= 
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0= @@ -376,6 +413,8 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -385,14 +424,19 @@ golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190321052220-f7bb7a8bee54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e h1:N7DeIrjYszNmSW409R3frPPwglRwMkXSBzwVbkOjLLA= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299 h1:DYfZAGf2WMFjMxbgTjaC+2HC7NkNAQs+6Q8b9WEB/F4= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -408,10 +452,8 @@ golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= @@ -419,25 +461,38 @@ golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gomodules.xyz/jsonpatch/v2 v2.0.1 h1:xyiBuvkD2g5n7cYzx6u2sxQvsAy4QJsZFCzGVdzOXZ0= gomodules.xyz/jsonpatch/v2 v2.0.1/go.mod h1:IhYNNY4jnS53ZnfE4PAmpKtDpTCj1JFXc+3mwe7XcUU= -gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0= -gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= -gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0 h1:KxkO13IPW4Lslp2bz+KHP2E3gtFlrIGNThxkZQ3g+4c= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873 h1:nfPFGzJkUDX6uBmpN/pSw7MbOAWegH5QDQuoXFHedLg= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.1 
h1:q4XQuHFC6I28BKZpo6IYyb3mNO+l7lSOxRuYTCiDfXk= google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0 h1:2dTRdpdFEEhJYQD8EMLB61nnrzSCTbG38PhqdhvOltg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= @@ -455,55 +510,57 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -k8s.io/api v0.17.2/go.mod h1:BS9fjjLc4CMuqfSO8vgbHPKMt5+SF0ET6u/RVDihTo4= -k8s.io/api v0.17.3 h1:XAm3PZp3wnEdzekNkcmj/9Y1zdmQYJ1I4GKSBBZ8aG0= -k8s.io/api v0.17.3/go.mod h1:YZ0OTkuw7ipbe305fMpIdf3GLXZKRigjtZaV5gzC2J0= -k8s.io/apiextensions-apiserver v0.17.2/go.mod h1:4KdMpjkEjjDI2pPfBA15OscyNldHWdBCfsWMDWAmSTs= -k8s.io/apiextensions-apiserver v0.17.3 h1:WDZWkPcbgvchEdDd7ysL21GGPx3UKZQLDZXEkevT6n4= -k8s.io/apiextensions-apiserver v0.17.3/go.mod h1:CJbCyMfkKftAd/X/V6OTHYhVn7zXnDdnkUjS1h0GTeY= -k8s.io/apimachinery v0.17.2/go.mod h1:b9qmWdKlLuU9EBh+06BtLcSf/Mu89rWL33naRxs1uZg= -k8s.io/apimachinery v0.17.3 h1:f+uZV6rm4/tHE7xXgLyToprg6xWairaClGVkm2t8omg= -k8s.io/apimachinery v0.17.3/go.mod h1:gxLnyZcGNdZTCLnq3fgzyg2A5BVCHTNDFrw8AmuJ+0g= -k8s.io/apiserver v0.17.2/go.mod h1:lBmw/TtQdtxvrTk0e2cgtOxHizXI+d0mmGQURIHQZlo= -k8s.io/apiserver v0.17.3 
h1:faZbSuFtJ4dx09vctKZGHms/7bp3qFtbqb10Swswqfs= -k8s.io/apiserver v0.17.3/go.mod h1:iJtsPpu1ZpEnHaNawpSV0nYTGBhhX2dUlnn7/QS7QiY= -k8s.io/cli-runtime v0.17.3/go.mod h1:X7idckYphH4SZflgNpOOViSxetiMj6xI0viMAjM81TA= -k8s.io/client-go v0.17.2/go.mod h1:QAzRgsa0C2xl4/eVpeVAZMvikCn8Nm81yqVx3Kk9XYI= -k8s.io/client-go v0.17.3 h1:deUna1Ksx05XeESH6XGCyONNFfiQmDdqeqUvicvP6nU= -k8s.io/client-go v0.17.3/go.mod h1:cLXlTMtWHkuK4tD360KpWz2gG2KtdWEr/OT02i3emRQ= -k8s.io/code-generator v0.17.2/go.mod h1:DVmfPQgxQENqDIzVR2ddLXMH34qeszkKSdH/N+s+38s= -k8s.io/code-generator v0.17.3/go.mod h1:l8BLVwASXQZTo2xamW5mQNFCe1XPiAesVq7Y1t7PiQQ= -k8s.io/component-base v0.17.2/go.mod h1:zMPW3g5aH7cHJpKYQ/ZsGMcgbsA/VyhEugF3QT1awLs= -k8s.io/component-base v0.17.3 h1:hQzTSshY14aLSR6WGIYvmw+w+u6V4d+iDR2iDGMrlUg= -k8s.io/component-base v0.17.3/go.mod h1:GeQf4BrgelWm64PXkIXiPh/XS0hnO42d9gx9BtbZRp8= +k8s.io/api v0.18.2/go.mod h1:SJCWI7OLzhZSvbY7U8zwNl9UA4o1fizoug34OV/2r78= +k8s.io/api v0.18.5 h1:fKbCxr+U3fu7k6jB+QeYPD/c6xKYeSJ2KVWmyUypuWM= +k8s.io/api v0.18.5/go.mod h1:tN+e/2nbdGKOAH55NMV8oGrMG+3uRlA9GaRfvnCCSNk= +k8s.io/apiextensions-apiserver v0.18.2/go.mod h1:q3faSnRGmYimiocj6cHQ1I3WpLqmDgJFlKL37fC4ZvY= +k8s.io/apiextensions-apiserver v0.18.5 h1:pvbXjB/BRXZiO+/Erp5Pxr+lnhDCv5uxNxHh3FLGZ/g= +k8s.io/apiextensions-apiserver v0.18.5/go.mod h1:woZ7PkEIMHjhHIyApvOwkGOkBLUYKuet0VWVkPTQ/Fs= +k8s.io/apimachinery v0.18.2/go.mod h1:9SnR/e11v5IbyPCGbvJViimtJ0SwHG4nfZFjU77ftcA= +k8s.io/apimachinery v0.18.5 h1:Lh6tgsM9FMkC12K5T5QjRm7rDs6aQN5JHkA0JomULDM= +k8s.io/apimachinery v0.18.5/go.mod h1:OaXp26zu/5J7p0f92ASynJa1pZo06YlV9fG7BoWbCko= +k8s.io/apiserver v0.18.2/go.mod h1:Xbh066NqrZO8cbsoenCwyDJ1OSi8Ag8I2lezeHxzwzw= +k8s.io/apiserver v0.18.5 h1:+kk0J2Qf6xd8a5rxcNpf80jwCF99KUWA7tpvctTWObc= +k8s.io/apiserver v0.18.5/go.mod h1:+1XgOMq7YJ3OyqPNSJ54EveHwCoBWcJT9CaPycYI5ps= +k8s.io/cli-runtime v0.18.5/go.mod h1:uS210tk6ngtwwIJctPLs4ul1r7XlrEtwh9dA1oB700A= +k8s.io/client-go v0.18.2/go.mod h1:Xcm5wVGXX9HAA2JJ2sSBUn3tCJ+4SVlCbl2MNNv+CIU= +k8s.io/client-go v0.18.5 h1:cLhGZdOmyPhwtt20Lrb7uAqxxB1uvY+NTmNJvno1oKA= +k8s.io/client-go v0.18.5/go.mod h1:EsiD+7Fx+bRckKWZXnAXRKKetm1WuzPagH4iOSC8x58= +k8s.io/code-generator v0.18.2/go.mod h1:+UHX5rSbxmR8kzS+FAv7um6dtYrZokQvjHpDSYRVkTc= +k8s.io/code-generator v0.18.5/go.mod h1:TgNEVx9hCyPGpdtCWA34olQYLkh3ok9ar7XfSsr8b6c= +k8s.io/component-base v0.18.2/go.mod h1:kqLlMuhJNHQ9lz8Z7V5bxUUtjFZnrypArGl58gmDfUM= +k8s.io/component-base v0.18.5 h1:QIz2xFax8W5XQREoNcP/MHgnt4ClgfZ837Qx9yCeCzA= +k8s.io/component-base v0.18.5/go.mod h1:RSbcboNk4B+S8Acs2JaBOVW3XNz1+A637s2jL+QQrlU= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo v0.0.0-20190822140433-26a664648505/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= +k8s.io/gengo v0.0.0-20200114144118-36b2048a9120/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= -k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a h1:UcxjrRMyNx/i/y8G7kPvLyy7rfbeuf1PYyBf973pgyU= -k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E= -k8s.io/kubectl v0.17.3 h1:9HHYj07kuFkM+sMJMOyQX29CKWq4lvKAG1UIPxNPMQ4= -k8s.io/kubectl v0.17.3/go.mod 
h1:NUn4IBY7f7yCMwSop2HCXlw/MVYP4HJBiUmOR3n9w28= -k8s.io/metrics v0.17.3/go.mod h1:HEJGy1fhHOjHggW9rMDBJBD3YuGroH3Y1pnIRw9FFaI= -k8s.io/utils v0.0.0-20191114184206-e782cd3c129f h1:GiPwtSzdP43eI1hpPCbROQCCIgCuiMMNF8YUVLF3vJo= -k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= -modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw= -modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk= -modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k= -modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs= -modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I= -sigs.k8s.io/controller-runtime v0.5.0 h1:CbqIy5fbUX+4E9bpnBFd204YAzRYlM9SWW77BbrcDQo= -sigs.k8s.io/controller-runtime v0.5.0/go.mod h1:REiJzC7Y00U+2YkMbT8wxgrsX5USpXKGhb2sCtAXiT8= +k8s.io/kube-openapi v0.0.0-20200121204235-bf4fb3bd569c/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E= +k8s.io/kube-openapi v0.0.0-20200410145947-61e04a5be9a6 h1:Oh3Mzx5pJ+yIumsAD0MOECPVeXsVot0UkiaCGVyfGQY= +k8s.io/kube-openapi v0.0.0-20200410145947-61e04a5be9a6/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E= +k8s.io/kubectl v0.18.5 h1:htctXnWqcF1VBkuzbWINqnwx/rM7byH9o2ZuHntlbJo= +k8s.io/kubectl v0.18.5/go.mod h1:LAGxvYunNuwcZst0OAMXnInFIv81/IeoAz2N1Yh+AhU= +k8s.io/metrics v0.18.5/go.mod h1:pqn6YiCCxUt067ivZVo4KtvppvdykV6HHG5+7ygVkNg= +k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89 h1:d4vVOjXm687F1iLSP2q3lyPPuyvTUt3aVoBpi2DqRsU= +k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.7 h1:uuHDyjllyzRyCIvvn0OBjiRB0SgBZGqHNYAmjR7fO50= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.7/go.mod h1:PHgbrJT7lCHcxMU+mDHEm+nx46H4zuuHZkDP6icnhu0= +sigs.k8s.io/controller-runtime v0.6.0 h1:Fzna3DY7c4BIP6KwfSlrfnj20DJ+SeMBK8HSFvOk9NM= +sigs.k8s.io/controller-runtime v0.6.0/go.mod h1:CpYf5pdNY/B352A1TFLAS2JVSlnGQ5O2cftPHndTroo= sigs.k8s.io/kustomize v2.0.3+incompatible/go.mod h1:MkjgH3RdOWrievjo6c9T245dYlB5QeXV4WCbnt/PEpU= -sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI= -sigs.k8s.io/structured-merge-diff v1.0.1-0.20191108220359-b1b620dd3f06 h1:zD2IemQ4LmOcAumeiyDWXKUI2SO0NYDe3H6QGvPOVgU= -sigs.k8s.io/structured-merge-diff v1.0.1-0.20191108220359-b1b620dd3f06/go.mod h1:/ULNhyfzRopfcjskuui0cTITekDduZ7ycKN3oUT9R18= +sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= +sigs.k8s.io/structured-merge-diff/v3 v3.0.0 h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E= +sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= sigs.k8s.io/yaml v1.1.0 h1:4A07+ZFc2wgJwo8YNlQpr1rVlgUDlxXHhPJciaPY5gs= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= vbom.ml/util v0.0.0-20160121211510-db5cfe13f5cc/go.mod h1:so/NYdZXCz+E3ZpW0uAoCj6uzU2+8OWDFv/HxUSs7kI= diff --git a/pkg/controller/kubefedcluster/clusterclient.go b/pkg/controller/kubefedcluster/clusterclient.go index 9272c67e47..3fc98deeb6 100644 --- a/pkg/controller/kubefedcluster/clusterclient.go +++ b/pkg/controller/kubefedcluster/clusterclient.go @@ -17,6 +17,7 @@ 
limitations under the License. package kubefedcluster import ( + "context" "strings" "time" @@ -125,7 +126,7 @@ func (self *ClusterClient) GetClusterHealthStatus() (*fedv1b1.KubeFedClusterStat LastProbeTime: currentTime, LastTransitionTime: ¤tTime, } - body, err := self.kubeClient.DiscoveryClient.RESTClient().Get().AbsPath("/healthz").Do().Raw() + body, err := self.kubeClient.DiscoveryClient.RESTClient().Get().AbsPath("/healthz").Do(context.Background()).Raw() if err != nil { runtime.HandleError(errors.Wrapf(err, "Failed to do cluster health check for cluster %q", self.clusterName)) clusterStatus.Conditions = append(clusterStatus.Conditions, newClusterOfflineCondition) @@ -145,7 +146,7 @@ func (self *ClusterClient) GetClusterHealthStatus() (*fedv1b1.KubeFedClusterStat // GetClusterZones gets the kubernetes cluster zones and region by inspecting labels on nodes in the cluster. func (self *ClusterClient) GetClusterZones() ([]string, string, error) { - nodes, err := self.kubeClient.CoreV1().Nodes().List(metav1.ListOptions{}) + nodes, err := self.kubeClient.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{}) if err != nil { klog.Errorf("Failed to list nodes while getting zone names: %v", err) return nil, "", err diff --git a/pkg/controller/kubefedcluster/controller_integration_test.go b/pkg/controller/kubefedcluster/controller_integration_test.go index d3375c9859..e9e8adcaef 100644 --- a/pkg/controller/kubefedcluster/controller_integration_test.go +++ b/pkg/controller/kubefedcluster/controller_integration_test.go @@ -79,6 +79,17 @@ var _ = BeforeSuite(func(done Done) { Expect(err).NotTo(HaveOccurred()) Expect(k8sClient).ToNot(BeNil()) + _, err = clientset.CoreV1().Namespaces().Create( + context.Background(), + &corev1.Namespace{ + ObjectMeta: metav1.ObjectMeta{ + Name: util.DefaultKubeFedSystemNamespace, + }, + }, + metav1.CreateOptions{}, + ) + Expect(err).ToNot(HaveOccurred()) + config = &util.ClusterHealthCheckConfig{ Period: 10 * time.Second, FailureThreshold: 3, @@ -121,7 +132,9 @@ var _ = Describe("TestKubefedClusterController", func() { }, } - _, err := clientset.CoreV1().Secrets(util.DefaultKubeFedSystemNamespace).Create(kubefedClusterSecret) + _, err := clientset.CoreV1().Secrets(util.DefaultKubeFedSystemNamespace).Create( + context.Background(), kubefedClusterSecret, metav1.CreateOptions{}, + ) Expect(err).ToNot(HaveOccurred()) kc := &fedv1b1.KubeFedCluster{ diff --git a/pkg/controller/status/controller.go b/pkg/controller/status/controller.go index f8bd25c543..558be59092 100644 --- a/pkg/controller/status/controller.go +++ b/pkg/controller/status/controller.go @@ -17,6 +17,7 @@ limitations under the License. 
package status import ( + "context" "fmt" "reflect" "sort" @@ -302,7 +303,7 @@ func (s *KubeFedStatusController) reconcile(qualifiedName util.QualifiedName) ut } if existingStatus == nil { - _, err = s.statusClient.Resources(qualifiedName.Namespace).Create(status, metav1.CreateOptions{}) + _, err = s.statusClient.Resources(qualifiedName.Namespace).Create(context.Background(), status, metav1.CreateOptions{}) if err != nil { runtime.HandleError(errors.Wrapf(err, "Failed to create status object for federated type %s %q", statusKind, key)) return util.StatusNeedsRecheck @@ -312,7 +313,7 @@ func (s *KubeFedStatusController) reconcile(qualifiedName util.QualifiedName) ut status.Object["clusterStatus"] = make([]util.ResourceClusterStatus, 0) } existingStatus.Object["clusterStatus"] = status.Object["clusterStatus"] - _, err = s.statusClient.Resources(qualifiedName.Namespace).Update(existingStatus, metav1.UpdateOptions{}) + _, err = s.statusClient.Resources(qualifiedName.Namespace).Update(context.Background(), existingStatus, metav1.UpdateOptions{}) if err != nil { runtime.HandleError(errors.Wrapf(err, "Failed to update status object for federated type %s %q", statusKind, key)) return util.StatusNeedsRecheck diff --git a/pkg/controller/util/genericinformer.go b/pkg/controller/util/genericinformer.go index 3bdfe9aaff..d291523716 100644 --- a/pkg/controller/util/genericinformer.go +++ b/pkg/controller/util/genericinformer.go @@ -17,6 +17,7 @@ limitations under the License. package util import ( + "context" "time" "github.com/pkg/errors" @@ -68,14 +69,14 @@ func NewGenericInformerWithEventHandler(config *rest.Config, namespace string, o ListFunc: func(opts metav1.ListOptions) (pkgruntime.Object, error) { res := listObj.DeepCopyObject() isNamespaceScoped := namespace != "" && mapping.Scope.Name() != meta.RESTScopeNameRoot - err := client.Get().NamespaceIfScoped(namespace, isNamespaceScoped).Resource(mapping.Resource.Resource).VersionedParams(&opts, scheme.ParameterCodec).Do().Into(res) + err := client.Get().NamespaceIfScoped(namespace, isNamespaceScoped).Resource(mapping.Resource.Resource).VersionedParams(&opts, scheme.ParameterCodec).Do(context.Background()).Into(res) return res, err }, WatchFunc: func(opts metav1.ListOptions) (watch.Interface, error) { // Watch needs to be set to true separately opts.Watch = true isNamespaceScoped := namespace != "" && mapping.Scope.Name() != meta.RESTScopeNameRoot - return client.Get().NamespaceIfScoped(namespace, isNamespaceScoped).Resource(mapping.Resource.Resource).VersionedParams(&opts, scheme.ParameterCodec).Watch() + return client.Get().NamespaceIfScoped(namespace, isNamespaceScoped).Resource(mapping.Resource.Resource).VersionedParams(&opts, scheme.ParameterCodec).Watch(context.Background()) }, }, obj, diff --git a/pkg/controller/util/resourceinformer.go b/pkg/controller/util/resourceinformer.go index 30b785743e..3bc21b168d 100644 --- a/pkg/controller/util/resourceinformer.go +++ b/pkg/controller/util/resourceinformer.go @@ -17,6 +17,8 @@ limitations under the License. 
package util import ( + "context" + "github.com/pkg/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -52,11 +54,11 @@ func newResourceInformer(client ResourceClient, namespace string, apiResource *m &cache.ListWatch{ ListFunc: func(options metav1.ListOptions) (pkgruntime.Object, error) { options.LabelSelector = labelSelector - return client.Resources(namespace).List(options) + return client.Resources(namespace).List(context.Background(), options) }, WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) { options.LabelSelector = labelSelector - return client.Resources(namespace).Watch(options) + return client.Resources(namespace).Watch(context.Background(), options) }, }, obj, // use an unstructured type with apiVersion / kind populated for informer logging purposes diff --git a/pkg/kubefedctl/disable.go b/pkg/kubefedctl/disable.go index 1c23b18776..e99e0e5018 100644 --- a/pkg/kubefedctl/disable.go +++ b/pkg/kubefedctl/disable.go @@ -351,7 +351,7 @@ func customResourcesExist(config *rest.Config, resource *metav1.APIResource) (bo } options := metav1.ListOptions{} - objList, err := client.Resources("").List(options) + objList, err := client.Resources("").List(context.Background(), options) if apierrors.IsNotFound(err) { return false, nil } else if err != nil { @@ -366,7 +366,7 @@ func deleteFederatedCRD(config *rest.Config, crdName string, write func(string)) return errors.Wrap(err, "Error creating crd client") } - err = client.CustomResourceDefinitions().Delete(crdName, nil) + err = client.CustomResourceDefinitions().Delete(context.Background(), crdName, metav1.DeleteOptions{}) if apierrors.IsNotFound(err) { write(fmt.Sprintf("customresourcedefinition %q does not exist\n", crdName)) } else if err != nil { diff --git a/pkg/kubefedctl/enable/enable.go b/pkg/kubefedctl/enable/enable.go index ea5c8b114b..e73648a9c9 100644 --- a/pkg/kubefedctl/enable/enable.go +++ b/pkg/kubefedctl/enable/enable.go @@ -233,7 +233,7 @@ func CreateResources(cmdOut io.Writer, config *rest.Config, resources *typeResou if err != nil { return errors.Wrap(err, "Failed to create host clientset") } - _, err = hostClientset.CoreV1().Namespaces().Get(namespace, metav1.GetOptions{}) + _, err = hostClientset.CoreV1().Namespaces().Get(context.Background(), namespace, metav1.GetOptions{}) if apierrors.IsNotFound(err) { return errors.Wrapf(err, "KubeFed system namespace %q does not exist", namespace) } else if err != nil { @@ -268,10 +268,10 @@ func CreateResources(cmdOut io.Writer, config *rest.Config, resources *typeResou return errors.Wrap(err, "Failed to create crd clientset") } - existingCRD, err := crdClient.CustomResourceDefinitions().Get(resources.CRD.Name, metav1.GetOptions{}) + existingCRD, err := crdClient.CustomResourceDefinitions().Get(context.Background(), resources.CRD.Name, metav1.GetOptions{}) if apierrors.IsNotFound(err) { if !dryRun { - _, err = crdClient.CustomResourceDefinitions().Create(resources.CRD) + _, err = crdClient.CustomResourceDefinitions().Create(context.Background(), resources.CRD, metav1.CreateOptions{}) if err != nil { return errors.Wrapf(err, "Error creating CRD %q", resources.CRD.Name) } @@ -315,7 +315,7 @@ func CreateResources(cmdOut io.Writer, config *rest.Config, resources *typeResou existingCRD.Spec = resources.CRD.Spec if !dryRun { - _, err = crdClient.CustomResourceDefinitions().Update(existingCRD) + _, err = crdClient.CustomResourceDefinitions().Update(context.Background(), existingCRD, metav1.UpdateOptions{}) if err != nil { return errors.Wrapf(err, "Error 
updating CRD %q", resources.CRD.Name) } diff --git a/pkg/kubefedctl/enable/schema.go b/pkg/kubefedctl/enable/schema.go index f6994ca985..172b3ff90e 100644 --- a/pkg/kubefedctl/enable/schema.go +++ b/pkg/kubefedctl/enable/schema.go @@ -17,6 +17,7 @@ limitations under the License. package enable import ( + "context" "fmt" "github.com/pkg/errors" @@ -63,7 +64,7 @@ func newCRDSchemaAccessor(config *rest.Config, apiResource metav1.APIResource) ( return nil, errors.Wrap(err, "Failed to create crd clientset") } crdName := fmt.Sprintf("%s.%s", apiResource.Name, apiResource.Group) - crd, err := crdClient.CustomResourceDefinitions().Get(crdName, metav1.GetOptions{}) + crd, err := crdClient.CustomResourceDefinitions().Get(context.Background(), crdName, metav1.GetOptions{}) if apierrors.IsNotFound(err) { return nil, nil } diff --git a/pkg/kubefedctl/federate/federate.go b/pkg/kubefedctl/federate/federate.go index 338306cf31..7a8f1d28d5 100644 --- a/pkg/kubefedctl/federate/federate.go +++ b/pkg/kubefedctl/federate/federate.go @@ -348,7 +348,7 @@ func getTargetResource(hostConfig *rest.Config, typeConfig typeconfig.Interface, } kind := targetAPIResource.Kind - resource, err := targetClient.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + resource, err := targetClient.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err != nil { return nil, errors.Wrapf(err, "Error retrieving target %s %q", kind, qualifiedName) } @@ -486,7 +486,7 @@ func CreateFederatedResource(hostConfig *rest.Config, typeConfig typeconfig.Inte // It might take a little while for the federated type to appear if the // same is being enabled while or immediately before federating the resource. err = wait.PollImmediate(createResourceRetryInterval, createResourceRetryTimeout, func() (bool, error) { - _, err := fedClient.Resources(federatedResource.GetNamespace()).Create(federatedResource, metav1.CreateOptions{}) + _, err := fedClient.Resources(federatedResource.GetNamespace()).Create(context.Background(), federatedResource, metav1.CreateOptions{}) if apierrors.IsNotFound(err) { return false, nil } diff --git a/pkg/kubefedctl/federate/util.go b/pkg/kubefedctl/federate/util.go index d1518552e7..df4cae7648 100644 --- a/pkg/kubefedctl/federate/util.go +++ b/pkg/kubefedctl/federate/util.go @@ -18,6 +18,7 @@ package federate import ( "bufio" + "context" "io" "os" @@ -191,7 +192,7 @@ func getResourcesInNamespace(config *rest.Config, namespace string, skipAPIResou return nil, errors.Wrapf(err, "Error creating client for %s", apiResource.Kind) } - resourceList, err := client.Resources(namespace).List(metav1.ListOptions{}) + resourceList, err := client.Resources(namespace).List(context.Background(), metav1.ListOptions{}) if apierrors.IsNotFound(err) || resourceList == nil { continue } diff --git a/pkg/kubefedctl/join.go b/pkg/kubefedctl/join.go index af51246bc8..ebcaf1d7a9 100644 --- a/pkg/kubefedctl/join.go +++ b/pkg/kubefedctl/join.go @@ -296,8 +296,9 @@ func performPreflightChecks(clusterClientset kubeclient.Interface, name, hostClu kubefedNamespace string, errorOnExisting bool) error { // Make sure there is no existing service account in the joining cluster. 
saName := util.ClusterServiceAccountName(name, hostClusterName) - _, err := clusterClientset.CoreV1().ServiceAccounts(kubefedNamespace).Get(saName, - metav1.GetOptions{}) + _, err := clusterClientset.CoreV1().ServiceAccounts(kubefedNamespace).Get( + context.Background(), saName, metav1.GetOptions{}, + ) switch { case apierrors.IsNotFound(err): @@ -376,7 +377,9 @@ func createKubeFedNamespace(clusterClientset kubeclient.Interface, kubefedNamesp return fedNamespace, nil } - _, err := clusterClientset.CoreV1().Namespaces().Get(kubefedNamespace, metav1.GetOptions{}) + _, err := clusterClientset.CoreV1().Namespaces().Get( + context.Background(), kubefedNamespace, metav1.GetOptions{}, + ) if err != nil && !apierrors.IsNotFound(err) { klog.V(2).Infof("Could not get %s namespace: %v", kubefedNamespace, err) return nil, err @@ -388,7 +391,9 @@ func createKubeFedNamespace(clusterClientset kubeclient.Interface, kubefedNamesp } // Not found, so create. - _, err = clusterClientset.CoreV1().Namespaces().Create(fedNamespace) + _, err = clusterClientset.CoreV1().Namespaces().Create( + context.Background(), fedNamespace, metav1.CreateOptions{}, + ) if err != nil && !apierrors.IsAlreadyExists(err) { klog.V(2).Infof("Could not create %s namespace: %v", kubefedNamespace, err) return nil, err @@ -476,7 +481,9 @@ func createServiceAccount(clusterClientset kubeclient.Interface, namespace, } // Create a new service account. - _, err := clusterClientset.CoreV1().ServiceAccounts(namespace).Create(sa) + _, err := clusterClientset.CoreV1().ServiceAccounts(namespace).Create( + context.Background(), sa, metav1.CreateOptions{}, + ) switch { case apierrors.IsAlreadyExists(err) && errorOnExisting: klog.V(2).Infof("Service account %s/%s already exists in target cluster %s", namespace, saName, joiningClusterName) @@ -516,7 +523,7 @@ func createClusterRoleAndBinding(clientset kubeclient.Interface, saName, namespa }, Rules: clusterPolicyRules, } - existingRole, err := clientset.RbacV1().ClusterRoles().Get(roleName, metav1.GetOptions{}) + existingRole, err := clientset.RbacV1().ClusterRoles().Get(context.Background(), roleName, metav1.GetOptions{}) switch { case err != nil && !apierrors.IsNotFound(err): klog.V(2).Infof("Could not get cluster role for service account %s in joining cluster %s due to %v", @@ -526,14 +533,14 @@ func createClusterRoleAndBinding(clientset kubeclient.Interface, saName, namespa return errors.Errorf("cluster role for service account %s in joining cluster %s already exists", saName, clusterName) case err == nil: existingRole.Rules = role.Rules - _, err := clientset.RbacV1().ClusterRoles().Update(existingRole) + _, err := clientset.RbacV1().ClusterRoles().Update(context.Background(), existingRole, metav1.UpdateOptions{}) if err != nil { klog.V(2).Infof("Could not update cluster role for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) return err } default: // role was not found - _, err := clientset.RbacV1().ClusterRoles().Create(role) + _, err := clientset.RbacV1().ClusterRoles().Create(context.Background(), role, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create cluster role for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -553,7 +560,7 @@ func createClusterRoleAndBinding(clientset kubeclient.Interface, saName, namespa Name: roleName, }, } - existingBinding, err := clientset.RbacV1().ClusterRoleBindings().Get(binding.Name, metav1.GetOptions{}) + existingBinding, err := 
clientset.RbacV1().ClusterRoleBindings().Get(context.Background(), binding.Name, metav1.GetOptions{}) switch { case err != nil && !apierrors.IsNotFound(err): klog.V(2).Infof("Could not get cluster role binding for service account %s in joining cluster %s due to %v", @@ -565,13 +572,13 @@ func createClusterRoleAndBinding(clientset kubeclient.Interface, saName, namespa // The roleRef cannot be updated, therefore if the existing roleRef is different, the existing rolebinding // must be deleted and recreated with the correct roleRef if !reflect.DeepEqual(existingBinding.RoleRef, binding.RoleRef) { - err = clientset.RbacV1().ClusterRoleBindings().Delete(existingBinding.Name, &metav1.DeleteOptions{}) + err = clientset.RbacV1().ClusterRoleBindings().Delete(context.Background(), existingBinding.Name, metav1.DeleteOptions{}) if err != nil { klog.V(2).Infof("Could not delete existing cluster role binding for service account %s in joining cluster %s due to: %v", saName, clusterName, err) return err } - _, err = clientset.RbacV1().ClusterRoleBindings().Create(binding) + _, err = clientset.RbacV1().ClusterRoleBindings().Create(context.Background(), binding, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create cluster role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -579,7 +586,7 @@ func createClusterRoleAndBinding(clientset kubeclient.Interface, saName, namespa } } else { existingBinding.Subjects = binding.Subjects - _, err := clientset.RbacV1().ClusterRoleBindings().Update(existingBinding) + _, err := clientset.RbacV1().ClusterRoleBindings().Update(context.Background(), existingBinding, metav1.UpdateOptions{}) if err != nil { klog.V(2).Infof("Could not update cluster role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -587,7 +594,7 @@ func createClusterRoleAndBinding(clientset kubeclient.Interface, saName, namespa } } default: - _, err = clientset.RbacV1().ClusterRoleBindings().Create(binding) + _, err = clientset.RbacV1().ClusterRoleBindings().Create(context.Background(), binding, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create cluster role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -613,7 +620,7 @@ func createRoleAndBinding(clientset kubeclient.Interface, saName, namespace, clu }, Rules: namespacedPolicyRules, } - existingRole, err := clientset.RbacV1().Roles(namespace).Get(roleName, metav1.GetOptions{}) + existingRole, err := clientset.RbacV1().Roles(namespace).Get(context.Background(), roleName, metav1.GetOptions{}) switch { case err != nil && !apierrors.IsNotFound(err): klog.V(2).Infof("Could not retrieve role for service account %s in joining cluster %s due to %v", saName, clusterName, err) @@ -622,14 +629,14 @@ func createRoleAndBinding(clientset kubeclient.Interface, saName, namespace, clu return errors.Errorf("role for service account %s in joining cluster %s already exists", saName, clusterName) case err == nil: existingRole.Rules = role.Rules - _, err = clientset.RbacV1().Roles(namespace).Update(existingRole) + _, err = clientset.RbacV1().Roles(namespace).Update(context.Background(), existingRole, metav1.UpdateOptions{}) if err != nil { klog.V(2).Infof("Could not update role for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) return err } default: - _, err := clientset.RbacV1().Roles(namespace).Create(role) + _, err := 
clientset.RbacV1().Roles(namespace).Create(context.Background(), role, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create role for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -649,7 +656,7 @@ func createRoleAndBinding(clientset kubeclient.Interface, saName, namespace, clu }, } - existingBinding, err := clientset.RbacV1().RoleBindings(namespace).Get(binding.Name, metav1.GetOptions{}) + existingBinding, err := clientset.RbacV1().RoleBindings(namespace).Get(context.Background(), binding.Name, metav1.GetOptions{}) switch { case err != nil && !apierrors.IsNotFound(err): klog.V(2).Infof("Could not retrieve role binding for service account %s in joining cluster %s due to: %v", @@ -661,13 +668,13 @@ func createRoleAndBinding(clientset kubeclient.Interface, saName, namespace, clu // The roleRef cannot be updated, therefore if the existing roleRef is different, the existing rolebinding // must be deleted and recreated with the correct roleRef if !reflect.DeepEqual(existingBinding.RoleRef, binding.RoleRef) { - err = clientset.RbacV1().RoleBindings(namespace).Delete(existingBinding.Name, &metav1.DeleteOptions{}) + err = clientset.RbacV1().RoleBindings(namespace).Delete(context.Background(), existingBinding.Name, metav1.DeleteOptions{}) if err != nil { klog.V(2).Infof("Could not delete existing role binding for service account %s in joining cluster %s due to: %v", saName, clusterName, err) return err } - _, err = clientset.RbacV1().RoleBindings(namespace).Create(binding) + _, err = clientset.RbacV1().RoleBindings(namespace).Create(context.Background(), binding, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -675,7 +682,7 @@ func createRoleAndBinding(clientset kubeclient.Interface, saName, namespace, clu } } else { existingBinding.Subjects = binding.Subjects - _, err = clientset.RbacV1().RoleBindings(namespace).Update(existingBinding) + _, err = clientset.RbacV1().RoleBindings(namespace).Update(context.Background(), existingBinding, metav1.UpdateOptions{}) if err != nil { klog.V(2).Infof("Could not update role binding for service account %s in joining cluster %s due to: %v", saName, clusterName, err) @@ -683,7 +690,7 @@ func createRoleAndBinding(clientset kubeclient.Interface, saName, namespace, clu } } default: - _, err = clientset.RbacV1().RoleBindings(namespace).Create(binding) + _, err = clientset.RbacV1().RoleBindings(namespace).Create(context.Background(), binding, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -722,7 +729,7 @@ func createHealthCheckClusterRoleAndBinding(clientset kubeclient.Interface, saNa }, }, } - existingRole, err := clientset.RbacV1().ClusterRoles().Get(role.Name, metav1.GetOptions{}) + existingRole, err := clientset.RbacV1().ClusterRoles().Get(context.Background(), role.Name, metav1.GetOptions{}) switch { case err != nil && !apierrors.IsNotFound(err): klog.V(2).Infof("Could not get health check cluster role for service account %s in joining cluster %s due to %v", @@ -732,14 +739,14 @@ func createHealthCheckClusterRoleAndBinding(clientset kubeclient.Interface, saNa return errors.Errorf("health check cluster role for service account %s in joining cluster %s already exists", saName, clusterName) case err == nil: existingRole.Rules = role.Rules - _, err := 
clientset.RbacV1().ClusterRoles().Update(existingRole) + _, err := clientset.RbacV1().ClusterRoles().Update(context.Background(), existingRole, metav1.UpdateOptions{}) if err != nil { klog.V(2).Infof("Could not update health check cluster role for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) return err } default: // role was not found - _, err := clientset.RbacV1().ClusterRoles().Create(role) + _, err := clientset.RbacV1().ClusterRoles().Create(context.Background(), role, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create health check cluster role for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -758,7 +765,7 @@ func createHealthCheckClusterRoleAndBinding(clientset kubeclient.Interface, saNa Name: roleName, }, } - existingBinding, err := clientset.RbacV1().ClusterRoleBindings().Get(binding.Name, metav1.GetOptions{}) + existingBinding, err := clientset.RbacV1().ClusterRoleBindings().Get(context.Background(), binding.Name, metav1.GetOptions{}) switch { case err != nil && !apierrors.IsNotFound(err): klog.V(2).Infof("Could not get health check cluster role binding for service account %s in joining cluster %s due to %v", @@ -770,13 +777,13 @@ func createHealthCheckClusterRoleAndBinding(clientset kubeclient.Interface, saNa // The roleRef cannot be updated, therefore if the existing roleRef is different, the existing rolebinding // must be deleted and recreated with the correct roleRef if !reflect.DeepEqual(existingBinding.RoleRef, binding.RoleRef) { - err = clientset.RbacV1().ClusterRoleBindings().Delete(existingBinding.Name, &metav1.DeleteOptions{}) + err = clientset.RbacV1().ClusterRoleBindings().Delete(context.Background(), existingBinding.Name, metav1.DeleteOptions{}) if err != nil { klog.V(2).Infof("Could not delete existing health check cluster role binding for service account %s in joining cluster %s due to: %v", saName, clusterName, err) return err } - _, err = clientset.RbacV1().ClusterRoleBindings().Create(binding) + _, err = clientset.RbacV1().ClusterRoleBindings().Create(context.Background(), binding, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create health check cluster role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -784,7 +791,7 @@ func createHealthCheckClusterRoleAndBinding(clientset kubeclient.Interface, saNa } } else { existingBinding.Subjects = binding.Subjects - _, err := clientset.RbacV1().ClusterRoleBindings().Update(existingBinding) + _, err := clientset.RbacV1().ClusterRoleBindings().Update(context.Background(), existingBinding, metav1.UpdateOptions{}) if err != nil { klog.V(2).Infof("Could not update health check cluster role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -792,7 +799,7 @@ func createHealthCheckClusterRoleAndBinding(clientset kubeclient.Interface, saNa } } default: - _, err = clientset.RbacV1().ClusterRoleBindings().Create(binding) + _, err = clientset.RbacV1().ClusterRoleBindings().Create(context.Background(), binding, metav1.CreateOptions{}) if err != nil { klog.V(2).Infof("Could not create health check cluster role binding for service account: %s in joining cluster: %s due to: %v", saName, clusterName, err) @@ -821,8 +828,9 @@ func populateSecretInHostCluster(clusterClientset, hostClientset kubeclient.Inte // Get the secret from the joining cluster. 
var secret *corev1.Secret err := wait.PollImmediate(1*time.Second, serviceAccountSecretTimeout, func() (bool, error) { - sa, err := clusterClientset.CoreV1().ServiceAccounts(joiningNamespace).Get(saName, - metav1.GetOptions{}) + sa, err := clusterClientset.CoreV1().ServiceAccounts(joiningNamespace).Get( + context.Background(), saName, metav1.GetOptions{}, + ) if err != nil { return false, nil } @@ -830,8 +838,9 @@ func populateSecretInHostCluster(clusterClientset, hostClientset kubeclient.Inte for _, objReference := range sa.Secrets { saSecretName := objReference.Name var err error - secret, err = clusterClientset.CoreV1().Secrets(joiningNamespace).Get(saSecretName, - metav1.GetOptions{}) + secret, err = clusterClientset.CoreV1().Secrets(joiningNamespace).Get( + context.Background(), saSecretName, metav1.GetOptions{}, + ) if err != nil { return false, nil } @@ -869,7 +878,9 @@ func populateSecretInHostCluster(clusterClientset, hostClientset kubeclient.Inte v1Secret.Name = secretName } - v1SecretResult, err := hostClientset.CoreV1().Secrets(hostNamespace).Create(&v1Secret) + v1SecretResult, err := hostClientset.CoreV1().Secrets(hostNamespace).Create( + context.Background(), &v1Secret, metav1.CreateOptions{}, + ) if err != nil { klog.V(2).Infof("Could not create secret in host cluster: %v", err) return nil, nil, err diff --git a/pkg/kubefedctl/orphaning/disable.go b/pkg/kubefedctl/orphaning/disable.go index c108d9c665..743e45b0ee 100644 --- a/pkg/kubefedctl/orphaning/disable.go +++ b/pkg/kubefedctl/orphaning/disable.go @@ -17,6 +17,7 @@ limitations under the License. package orphaning import ( + "context" "io" "github.com/pkg/errors" @@ -89,7 +90,7 @@ func (o *orphanResource) RunDisable(cmdOut io.Writer, config util.FedConfig) err return nil } ctlutil.DisableOrphaning(fedResource) - _, err = resourceClient.Update(fedResource, metav1.UpdateOptions{}) + _, err = resourceClient.Update(context.Background(), fedResource, metav1.UpdateOptions{}) if err != nil { return errors.Wrapf(err, "Failed to update resource %s %q", fedResource.GetKind(), ctlutil.QualifiedName{Name: fedResource.GetName(), Namespace: fedResource.GetNamespace()}) diff --git a/pkg/kubefedctl/orphaning/enable.go b/pkg/kubefedctl/orphaning/enable.go index 41d2c09a38..f3a6f337c4 100644 --- a/pkg/kubefedctl/orphaning/enable.go +++ b/pkg/kubefedctl/orphaning/enable.go @@ -17,6 +17,7 @@ limitations under the License. package orphaning import ( + "context" "io" "github.com/pkg/errors" @@ -89,7 +90,7 @@ func (o *orphanResource) RunEnable(cmdOut io.Writer, config util.FedConfig) erro return nil } ctlutil.EnableOrphaning(fedResource) - _, err = resourceClient.Update(fedResource, metav1.UpdateOptions{}) + _, err = resourceClient.Update(context.Background(), fedResource, metav1.UpdateOptions{}) if err != nil { return errors.Wrapf(err, "Failed to update resource %s %q", fedResource.GetKind(), ctlutil.QualifiedName{Name: fedResource.GetName(), Namespace: fedResource.GetNamespace()}) diff --git a/pkg/kubefedctl/orphaning/orphaning.go b/pkg/kubefedctl/orphaning/orphaning.go index d625ed43d6..876fa6d8a0 100644 --- a/pkg/kubefedctl/orphaning/orphaning.go +++ b/pkg/kubefedctl/orphaning/orphaning.go @@ -17,6 +17,7 @@ limitations under the License. 
package orphaning import ( + "context" "fmt" "io" @@ -131,7 +132,7 @@ func (o *orphanResource) GetResourceClient(config util.FedConfig, cmdOut io.Writ // Returns the Federated resource where the orphaning-deletion will be managed func (o *orphanResource) GetFederatedResource(resourceClient dynamic.ResourceInterface) (*unstructured.Unstructured, error) { - resource, err := resourceClient.Get(o.resourceName, metav1.GetOptions{}) + resource, err := resourceClient.Get(context.Background(), o.resourceName, metav1.GetOptions{}) if err != nil { return nil, errors.Wrapf(err, "Failed to retrieve resource: %q", ctlutil.QualifiedName{Name: o.resourceName, Namespace: o.resourceNamespace}) diff --git a/pkg/kubefedctl/unjoin.go b/pkg/kubefedctl/unjoin.go index 577a920d94..50d6b068f2 100644 --- a/pkg/kubefedctl/unjoin.go +++ b/pkg/kubefedctl/unjoin.go @@ -244,8 +244,9 @@ func deleteFederatedClusterAndSecret(hostClientset kubeclient.Interface, client return errors.Wrapf(err, "Failed to get kubefed cluster \"%s/%s\"", kubefedNamespace, unjoiningClusterName) } - err = hostClientset.CoreV1().Secrets(kubefedNamespace).Delete(fedCluster.Spec.SecretRef.Name, - &metav1.DeleteOptions{}) + err = hostClientset.CoreV1().Secrets(kubefedNamespace).Delete( + context.Background(), fedCluster.Spec.SecretRef.Name, metav1.DeleteOptions{}, + ) if apierrors.IsNotFound(err) { klog.V(2).Infof("Secret \"%s/%s\" does not exist in the host cluster.", kubefedNamespace, fedCluster.Spec.SecretRef.Name) } else if err != nil { @@ -305,12 +306,12 @@ func deleteFedNSFromUnjoinCluster(hostClientset, unjoiningClusterClientset kubec return nil } - hostClusterNamespace, err := hostClientset.CoreV1().Namespaces().Get(kubefedNamespace, metav1.GetOptions{}) + hostClusterNamespace, err := hostClientset.CoreV1().Namespaces().Get(context.Background(), kubefedNamespace, metav1.GetOptions{}) if err != nil { return errors.Wrapf(err, "Error retrieving namespace %q from host cluster", kubefedNamespace) } - unjoiningClusterNamespace, err := unjoiningClusterClientset.CoreV1().Namespaces().Get(kubefedNamespace, metav1.GetOptions{}) + unjoiningClusterNamespace, err := unjoiningClusterClientset.CoreV1().Namespaces().Get(context.Background(), kubefedNamespace, metav1.GetOptions{}) if err != nil { return errors.Wrapf(err, "Error retrieving namespace %q from unjoining cluster %q", kubefedNamespace, unjoiningClusterName) } @@ -321,7 +322,7 @@ func deleteFedNSFromUnjoinCluster(hostClientset, unjoiningClusterClientset kubec } klog.V(2).Infof("Deleting kubefed namespace %q from unjoining cluster %q.", kubefedNamespace, unjoiningClusterName) - err = unjoiningClusterClientset.CoreV1().Namespaces().Delete(kubefedNamespace, &metav1.DeleteOptions{}) + err = unjoiningClusterClientset.CoreV1().Namespaces().Delete(context.Background(), kubefedNamespace, metav1.DeleteOptions{}) if apierrors.IsNotFound(err) { klog.V(2).Infof("The kubefed namespace %q no longer exists in unjoining cluster %q.", kubefedNamespace, unjoiningClusterName) return nil @@ -346,8 +347,9 @@ func deleteServiceAccount(clusterClientset kubeclient.Interface, saName, klog.V(2).Infof("Deleting service account \"%s/%s\" in unjoining cluster %q.", namespace, saName, unjoiningClusterName) // Delete a service account. 
- err := clusterClientset.CoreV1().ServiceAccounts(namespace).Delete(saName, - &metav1.DeleteOptions{}) + err := clusterClientset.CoreV1().ServiceAccounts(namespace).Delete( + context.Background(), saName, metav1.DeleteOptions{}, + ) if apierrors.IsNotFound(err) { klog.V(2).Infof("Service account \"%s/%s\" does not exist.", namespace, saName) } else if err != nil { @@ -376,7 +378,7 @@ func deleteClusterRoleAndBinding(clusterClientset kubeclient.Interface, klog.V(2).Infof("Deleting cluster role binding %q for service account %q in unjoining cluster %q.", name, saName, unjoiningClusterName) - err := clusterClientset.RbacV1().ClusterRoleBindings().Delete(name, &metav1.DeleteOptions{}) + err := clusterClientset.RbacV1().ClusterRoleBindings().Delete(context.Background(), name, metav1.DeleteOptions{}) if apierrors.IsNotFound(err) { klog.V(2).Infof("Cluster role binding %q for service account %q does not exist in unjoining cluster %q.", name, saName, unjoiningClusterName) @@ -394,7 +396,7 @@ func deleteClusterRoleAndBinding(clusterClientset kubeclient.Interface, klog.V(2).Infof("Deleting cluster role %q for service account %q in unjoining cluster %q.", name, saName, unjoiningClusterName) - err = clusterClientset.RbacV1().ClusterRoles().Delete(name, &metav1.DeleteOptions{}) + err = clusterClientset.RbacV1().ClusterRoles().Delete(context.Background(), name, metav1.DeleteOptions{}) if apierrors.IsNotFound(err) { klog.V(2).Infof("Cluster role %q for service account %q does not exist in unjoining cluster %q.", name, saName, unjoiningClusterName) @@ -413,7 +415,7 @@ func deleteClusterRoleAndBinding(clusterClientset kubeclient.Interface, klog.V(2).Infof("Deleting role binding \"%s/%s\" for service account %q in unjoining cluster %q.", namespace, roleName, saName, unjoiningClusterName) - err := clusterClientset.RbacV1().RoleBindings(namespace).Delete(roleName, &metav1.DeleteOptions{}) + err := clusterClientset.RbacV1().RoleBindings(namespace).Delete(context.Background(), roleName, metav1.DeleteOptions{}) if apierrors.IsNotFound(err) { klog.V(2).Infof("Role binding \"%s/%s\" for service account %q does not exist in unjoining cluster %q.", namespace, roleName, saName, unjoiningClusterName) @@ -431,7 +433,7 @@ func deleteClusterRoleAndBinding(clusterClientset kubeclient.Interface, klog.V(2).Infof("Deleting role \"%s/%s\" for service account %q in unjoining cluster %q.", namespace, roleName, saName, unjoiningClusterName) - err = clusterClientset.RbacV1().Roles(namespace).Delete(roleName, &metav1.DeleteOptions{}) + err = clusterClientset.RbacV1().Roles(namespace).Delete(context.Background(), roleName, metav1.DeleteOptions{}) if apierrors.IsNotFound(err) { klog.V(2).Infof("Role \"%s/%s\" for service account %q does not exist in unjoining cluster %q.", namespace, roleName, saName, unjoiningClusterName) diff --git a/pkg/schedulingtypes/plugin.go b/pkg/schedulingtypes/plugin.go index e02e3f6436..34f5ba8a0a 100644 --- a/pkg/schedulingtypes/plugin.go +++ b/pkg/schedulingtypes/plugin.go @@ -17,6 +17,7 @@ limitations under the License. 
package schedulingtypes import ( + "context" "fmt" "reflect" "sort" @@ -133,7 +134,7 @@ func (p *Plugin) FederatedTypeExists(key string) bool { } func (p *Plugin) Reconcile(qualifiedName util.QualifiedName, result map[string]int64) error { - fedObject, err := p.federatedTypeClient.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + fedObject, err := p.federatedTypeClient.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err != nil && apierrors.IsNotFound(err) { // Federated resource has been deleted - no further action required return nil @@ -173,7 +174,7 @@ func (p *Plugin) Reconcile(qualifiedName util.QualifiedName, result map[string]i } if isDirty { - _, err := p.federatedTypeClient.Resources(qualifiedName.Namespace).Update(fedObject, metav1.UpdateOptions{}) + _, err := p.federatedTypeClient.Resources(qualifiedName.Namespace).Update(context.Background(), fedObject, metav1.UpdateOptions{}) if err != nil { return err } diff --git a/scripts/pre-commit.sh b/scripts/pre-commit.sh index b57fef928d..0197a7eb63 100755 --- a/scripts/pre-commit.sh +++ b/scripts/pre-commit.sh @@ -151,7 +151,7 @@ echo "Downloading e2e test dependencies" ./scripts/download-e2e-binaries.sh CREATE_INSECURE_REGISTRY=y CONFIGURE_INSECURE_REGISTRY_HOST=y \ - KIND_TAG="v1.17.5" ./scripts/create-clusters.sh + KIND_TAG="v1.18.4" ./scripts/create-clusters.sh # Initialize list of clusters to join join-cluster-list > /dev/null diff --git a/test/common/crudtester.go b/test/common/crudtester.go index eebb8ebdaf..35630df387 100644 --- a/test/common/crudtester.go +++ b/test/common/crudtester.go @@ -22,7 +22,7 @@ import ( "strings" "time" - "github.com/evanphx/json-patch" + jsonpatch "github.com/evanphx/json-patch" "github.com/pkg/errors" apiv1 "k8s.io/api/core/v1" @@ -267,7 +267,7 @@ func (c *FederatedTypeCrudTester) CheckDelete(fedObject *unstructured.Unstructur err := wait.PollImmediate(c.waitInterval, wait.ForeverTestTimeout, func() (bool, error) { var err error if fedObject == nil { - fedObject, err = client.Resources(namespace).Get(name, metav1.GetOptions{}) + fedObject, err = client.Resources(namespace).Get(context.Background(), name, metav1.GetOptions{}) if err != nil { c.tl.Logf("Error retrieving %s %q to add the %q annotation: %v", federatedKind, qualifiedName, orphanKey, err) return false, nil @@ -277,7 +277,7 @@ func (c *FederatedTypeCrudTester) CheckDelete(fedObject *unstructured.Unstructur return true, nil } util.EnableOrphaning(fedObject) - fedObject, err = client.Resources(namespace).Update(fedObject, metav1.UpdateOptions{}) + fedObject, err = client.Resources(namespace).Update(context.Background(), fedObject, metav1.UpdateOptions{}) if err == nil { return true, nil } @@ -292,7 +292,7 @@ func (c *FederatedTypeCrudTester) CheckDelete(fedObject *unstructured.Unstructur } c.tl.Logf("Deleting %s %q", federatedKind, qualifiedName) - err := client.Resources(namespace).Delete(name, nil) + err := client.Resources(namespace).Delete(context.Background(), name, metav1.DeleteOptions{}) if err != nil { c.tl.Fatalf("Error deleting %s %q: %v", federatedKind, qualifiedName, err) } @@ -308,7 +308,7 @@ func (c *FederatedTypeCrudTester) CheckDelete(fedObject *unstructured.Unstructur // Wait for deletion. The federated resource will only be removed once managed resources have // been deleted or orphaned. 
err = wait.PollImmediate(c.waitInterval, waitTimeout, func() (bool, error) { - _, err := client.Resources(namespace).Get(name, metav1.GetOptions{}) + _, err := client.Resources(namespace).Get(context.Background(), name, metav1.GetOptions{}) if apierrors.IsNotFound(err) { return true, nil } @@ -333,7 +333,7 @@ func (c *FederatedTypeCrudTester) CheckDelete(fedObject *unstructured.Unstructur for clusterName, testCluster := range c.testClusters { namespace = util.QualifiedNameForCluster(clusterName, qualifiedName).Namespace err = wait.PollImmediate(c.waitInterval, waitTimeout, func() (bool, error) { - obj, err := testCluster.Client.Resources(namespace).Get(name, metav1.GetOptions{}) + obj, err := testCluster.Client.Resources(namespace).Get(context.Background(), name, metav1.GetOptions{}) switch { case !deletingInCluster && apierrors.IsNotFound(err): return false, errors.Errorf("%s %q was unexpectedly deleted from cluster %q", targetKind, qualifiedName, clusterName) @@ -526,7 +526,7 @@ func (c *FederatedTypeCrudTester) checkHostNamespaceUnlabeled(client util.Resour // deleted when it is not targeted by placement. err := wait.PollImmediate(c.waitInterval, c.clusterWaitTimeout, func() (bool, error) { - hostNamespace, err := client.Resources("").Get(qualifiedName.Name, metav1.GetOptions{}) + hostNamespace, err := client.Resources("").Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err != nil { c.tl.Errorf("Error retrieving %s %q in host cluster %q: %v", targetKind, qualifiedName, clusterName, err) return false, nil @@ -546,7 +546,7 @@ func (c *FederatedTypeCrudTester) waitForResource(client util.ResourceClient, qu return false, nil } - clusterObj, err := client.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + clusterObj, err := client.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err == nil && util.ObjectVersion(clusterObj) == expectedVersion { // Validate that the resource has been labeled properly, // indicating creation or adoption by the sync controller. This @@ -597,7 +597,7 @@ func (c *FederatedTypeCrudTester) TestClusters() map[string]TestCluster { func (c *FederatedTypeCrudTester) waitForResourceDeletion(client util.ResourceClient, qualifiedName util.QualifiedName, versionRemoved func() bool) error { err := wait.PollImmediate(c.waitInterval, c.clusterWaitTimeout, func() (bool, error) { - _, err := client.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + _, err := client.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if apierrors.IsNotFound(err) { if !versionRemoved() { c.tl.Logf("Removal of %q %s successful, but propagated version still exists", c.typeConfig.GetTargetType().Kind, qualifiedName) @@ -620,11 +620,11 @@ func (c *FederatedTypeCrudTester) updateObject(apiResource metav1.APIResource, o mutateResourceFunc(obj) var err error - updatedObj, err = client.Resources(obj.GetNamespace()).Update(obj, metav1.UpdateOptions{}) + updatedObj, err = client.Resources(obj.GetNamespace()).Update(context.Background(), obj, metav1.UpdateOptions{}) if apierrors.IsConflict(err) { // The resource was updated by the KubeFed controller. // Get the latest version and retry. 
- obj, err = client.Resources(obj.GetNamespace()).Get(obj.GetName(), metav1.GetOptions{}) + obj, err = client.Resources(obj.GetNamespace()).Get(context.Background(), obj.GetName(), metav1.GetOptions{}) return false, err } // Be tolerant of a slow server @@ -725,7 +725,7 @@ func (c *FederatedTypeCrudTester) CheckStatusCreated(qualifiedName util.Qualifie client := c.resourceClient(*statusAPIResource) err := wait.PollImmediate(c.waitInterval, wait.ForeverTestTimeout, func() (bool, error) { - _, err := client.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + _, err := client.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err != nil && !apierrors.IsNotFound(err) { c.tl.Errorf("An unexpected error occurred while polling for desired status: %v", err) } diff --git a/test/common/dns.go b/test/common/dns.go index 372bd77b91..fed59fcec9 100644 --- a/test/common/dns.go +++ b/test/common/dns.go @@ -17,6 +17,7 @@ limitations under the License. package common import ( + "context" "reflect" "time" @@ -137,7 +138,7 @@ func Equivalent(actual, desired pkgruntime.Object) bool { // WaitForNamespace waits for namespace to be created in a cluster. func WaitForNamespaceOrDie(tl TestLogger, client kubeclientset.Interface, clusterName, namespace string, interval, timeout time.Duration) { err := wait.PollImmediate(interval, timeout, func() (exist bool, err error) { - _, err = client.CoreV1().Namespaces().Get(namespace, metav1.GetOptions{}) + _, err = client.CoreV1().Namespaces().Get(context.Background(), namespace, metav1.GetOptions{}) if errors.IsNotFound(err) { return false, nil } diff --git a/test/common/resource_helper.go b/test/common/resource_helper.go index 20b8b9b57b..1dc31e51fa 100644 --- a/test/common/resource_helper.go +++ b/test/common/resource_helper.go @@ -17,6 +17,7 @@ limitations under the License. package common import ( + "context" "fmt" "github.com/pkg/errors" @@ -40,7 +41,7 @@ func CreateResource(kubeconfig *restclient.Config, apiResource metav1.APIResourc if err != nil { return nil, errors.Wrapf(err, "Error creating resource client") } - obj, err := client.Resources(namespace).Create(desiredObj, metav1.CreateOptions{}) + obj, err := client.Resources(namespace).Create(context.Background(), desiredObj, metav1.CreateOptions{}) if err != nil { return nil, errors.Wrapf(err, "Error creating %s", resourceMsg) } diff --git a/test/e2e/crd.go b/test/e2e/crd.go index b2e109ce50..cde3497b34 100644 --- a/test/e2e/crd.go +++ b/test/e2e/crd.go @@ -17,6 +17,7 @@ limitations under the License. 
package e2e import ( + "context" "fmt" "strings" @@ -233,7 +234,7 @@ func waitForCrd(config *rest.Config, tl common.TestLogger, apiResource metav1.AP tl.Fatalf("Error creating client for crd %q: %v", apiResource.Kind, err) } err = wait.PollImmediate(framework.PollInterval, framework.TestContext.SingleCallTimeout, func() (bool, error) { - _, err := client.Resources("invalid").Get("invalid", metav1.GetOptions{}) + _, err := client.Resources("invalid").Get(context.Background(), "invalid", metav1.GetOptions{}) if apierrors.IsNotFound(err) { return true, nil } @@ -249,7 +250,7 @@ func createCrdForHost(tl common.TestLogger, client *apiextv1b1client.Apiextensio } func createCrd(tl common.TestLogger, client *apiextv1b1client.ApiextensionsV1beta1Client, crd *apiextv1b1.CustomResourceDefinition, clusterName string) *apiextv1b1.CustomResourceDefinition { - createdCrd, err := client.CustomResourceDefinitions().Create(crd) + createdCrd, err := client.CustomResourceDefinitions().Create(context.Background(), crd, metav1.CreateOptions{}) if err != nil { tl.Fatalf("Error creating crd %s in %s: %v", crd.Name, clusterMsg(clusterName), err) } @@ -259,7 +260,7 @@ func createCrd(tl common.TestLogger, client *apiextv1b1client.ApiextensionsV1bet func ensureCRDRemoval(tl common.TestLogger, client *apiextv1b1client.ApiextensionsV1beta1Client, crdName, clusterName string) { framework.AddCleanupAction(func() { - err := client.CustomResourceDefinitions().Delete(crdName, nil) + err := client.CustomResourceDefinitions().Delete(context.Background(), crdName, metav1.DeleteOptions{}) if err != nil { tl.Errorf("Error deleting crd %q in %s: %v", crdName, clusterMsg(clusterName), err) } diff --git a/test/e2e/federate.go b/test/e2e/federate.go index 87dadde10b..f1008cff87 100644 --- a/test/e2e/federate.go +++ b/test/e2e/federate.go @@ -295,13 +295,13 @@ func deleteResources(f framework.KubeFedFramework, tl common.TestLogger, typeCon func deleteResource(tl common.TestLogger, client util.ResourceClient, qualifiedName util.QualifiedName, kind string) { tl.Logf("Deleting %s %q", kind, qualifiedName) - err := client.Resources(qualifiedName.Namespace).Delete(qualifiedName.Name, &metav1.DeleteOptions{}) + err := client.Resources(qualifiedName.Namespace).Delete(context.Background(), qualifiedName.Name, metav1.DeleteOptions{}) if err != nil && !apierrors.IsNotFound(err) { tl.Fatalf("Error deleting %s %q: %v", kind, qualifiedName, err) } err = wait.PollImmediate(framework.PollInterval, framework.TestContext.SingleCallTimeout, func() (bool, error) { - _, err := client.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + _, err := client.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if apierrors.IsNotFound(err) { return true, nil } @@ -314,7 +314,7 @@ func deleteResource(tl common.TestLogger, client util.ResourceClient, qualifiedN func fedResourceFromAPI(tl common.TestLogger, typeConfig typeconfig.Interface, kubeConfig *restclient.Config, qualifiedName util.QualifiedName) *unstructured.Unstructured { client := getFedClient(tl, typeConfig, kubeConfig) - fedResource, err := client.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + fedResource, err := client.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err != nil { tl.Fatalf("Federated resource %q not found: %v", qualifiedName, err) } @@ -323,7 +323,7 @@ func fedResourceFromAPI(tl common.TestLogger, typeConfig 
typeconfig.Interface, k func targetResourceFromAPI(tl common.TestLogger, typeConfig typeconfig.Interface, kubeConfig *restclient.Config, qualifiedName util.QualifiedName) *unstructured.Unstructured { client := getTargetClient(tl, typeConfig, kubeConfig) - targetResource, err := client.Resources(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + targetResource, err := client.Resources(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if err != nil { tl.Fatalf("Test resource %q not found: %v", qualifiedName, err) } diff --git a/test/e2e/framework/framework.go b/test/e2e/framework/framework.go index c2f9f8ad42..b7bbd5cc59 100644 --- a/test/e2e/framework/framework.go +++ b/test/e2e/framework/framework.go @@ -312,7 +312,7 @@ func CreateNamespace(client kubeclientset.Interface, generateName string) (strin // TODO(marun) should all api calls be made 'robustly'? var namespaceName string if err := wait.PollImmediate(PollInterval, TestContext.SingleCallTimeout, func() (bool, error) { - namespace, err := client.CoreV1().Namespaces().Create(namespaceObj) + namespace, err := client.CoreV1().Namespaces().Create(context.Background(), namespaceObj, metav1.CreateOptions{}) if err != nil { Logf("Unexpected error while creating namespace: %v", err) return false, nil diff --git a/test/e2e/framework/unmanaged.go b/test/e2e/framework/unmanaged.go index 47c0ff8057..517890cd0a 100644 --- a/test/e2e/framework/unmanaged.go +++ b/test/e2e/framework/unmanaged.go @@ -162,7 +162,7 @@ func (f *UnmanagedFramework) AfterEach() { if CurrentGinkgoTestDescription().Failed && f.testNamespaceName != "" { kubeClient := f.KubeClient(userAgent) DumpEventsInNamespace(func(opts metav1.ListOptions, ns string) (*corev1.EventList, error) { - return kubeClient.CoreV1().Events(ns).List(opts) + return kubeClient.CoreV1().Events(ns).List(context.Background(), opts) }, f.testNamespaceName) } } @@ -315,7 +315,9 @@ func (f *UnmanagedFramework) setUpSyncControllerFixture(typeConfig typeconfig.In func DeleteNamespace(client kubeclientset.Interface, namespaceName string) { orphanDependents := false - if err := client.CoreV1().Namespaces().Delete(namespaceName, &metav1.DeleteOptions{OrphanDependents: &orphanDependents}); err != nil { + if err := client.CoreV1().Namespaces().Delete( + context.Background(), namespaceName, metav1.DeleteOptions{OrphanDependents: &orphanDependents}, + ); err != nil { if !apierrors.IsNotFound(err) { Failf("Error while deleting namespace %s: %s", namespaceName, err) } @@ -348,7 +350,7 @@ func DeleteNamespace(client kubeclientset.Interface, namespaceName string) { func waitForNamespaceDeletion(client kubeclientset.Interface, namespace string) error { err := wait.PollImmediate(PollInterval, TestContext.SingleCallTimeout, func() (bool, error) { - if _, err := client.CoreV1().Namespaces().Get(namespace, metav1.GetOptions{}); err != nil { + if _, err := client.CoreV1().Namespaces().Get(context.Background(), namespace, metav1.GetOptions{}); err != nil { if apierrors.IsNotFound(err) { return true, nil } diff --git a/test/e2e/ftccontroller.go b/test/e2e/ftccontroller.go index 29332804d0..9fcede9078 100644 --- a/test/e2e/ftccontroller.go +++ b/test/e2e/ftccontroller.go @@ -86,7 +86,7 @@ var _ = Describe("FTC controller", func() { waitForGenerationSynced(tl, client, objectMeta.Namespace, objectMeta.Name) By("Upgrading the CRD version from v1 to v2") - existingCrd, err := crdClient.CustomResourceDefinitions().Get(objectMeta.Name, metav1.GetOptions{}) + existingCrd, 
err := crdClient.CustomResourceDefinitions().Get(context.Background(), objectMeta.Name, metav1.GetOptions{}) if err != nil { tl.Fatalf("Error retrieving target CRD %q: %v", objectMeta.Name, err) } @@ -103,7 +103,7 @@ var _ = Describe("FTC controller", func() { Storage: false, }, } - _, err = crdClient.CustomResourceDefinitions().Update(existingCrd) + _, err = crdClient.CustomResourceDefinitions().Update(context.Background(), existingCrd, metav1.UpdateOptions{}) if err != nil { tl.Fatalf("Error updating target CRD version %q: %v", existingCrd.Spec.Version, err) } diff --git a/test/e2e/ingressdns.go b/test/e2e/ingressdns.go index c6e1e72b2c..afdf5e2a1a 100644 --- a/test/e2e/ingressdns.go +++ b/test/e2e/ingressdns.go @@ -155,7 +155,7 @@ func createClusterIngress(f framework.KubeFedFramework, name, namespace string, common.WaitForNamespaceOrDie(framework.NewE2ELogger(), client, clusterName, namespace, framework.PollInterval, framework.TestContext.SingleCallTimeout) - createdIngress, err := client.ExtensionsV1beta1().Ingresses(namespace).Create(ingress) + createdIngress, err := client.ExtensionsV1beta1().Ingresses(namespace).Create(context.Background(), ingress, metav1.CreateOptions{}) framework.ExpectNoError(err, "Error creating ingress in cluster %q", clusterName) createdIngress.Status = extv1b1.IngressStatus{ @@ -163,7 +163,7 @@ func createClusterIngress(f framework.KubeFedFramework, name, namespace string, } // Fake out provisioning LoadBalancer by updating the ingress status in member cluster. - _, err = client.ExtensionsV1beta1().Ingresses(namespace).UpdateStatus(createdIngress) + _, err = client.ExtensionsV1beta1().Ingresses(namespace).UpdateStatus(context.Background(), createdIngress, metav1.UpdateOptions{}) framework.ExpectNoError(err, "Error updating ingress status in cluster %q", clusterName) } diff --git a/test/e2e/placement.go b/test/e2e/placement.go index 5070c82342..d3c848fa21 100644 --- a/test/e2e/placement.go +++ b/test/e2e/placement.go @@ -129,7 +129,7 @@ var _ = Describe("Placement", func() { tl.Fatalf("Error creating resource client for %q: %v", targetKind, err) } err = wait.PollImmediate(framework.PollInterval, framework.TestContext.SingleCallTimeout, func() (bool, error) { - _, err := client.Resources(namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + _, err := client.Resources(namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) if errors.IsNotFound(err) { return true, nil } diff --git a/test/e2e/scheduling.go b/test/e2e/scheduling.go index ee1e87353d..62f1e9afb7 100644 --- a/test/e2e/scheduling.go +++ b/test/e2e/scheduling.go @@ -206,7 +206,7 @@ func createTestObjs(tl common.TestLogger, client genericclient.Client, typeConfi if err != nil { return "", err } - createdFedObject, err := federatedTypeClient.Resources(namespace).Create(fedObject, metav1.CreateOptions{}) + createdFedObject, err := federatedTypeClient.Resources(namespace).Create(context.Background(), fedObject, metav1.CreateOptions{}) if err != nil { return "", err } @@ -234,7 +234,7 @@ func deleteTestObj(typeConfig typeconfig.Interface, kubeConfig *restclient.Confi return err } - err = federatedTypeClient.Resources(namespace).Delete(name, &metav1.DeleteOptions{}) + err = federatedTypeClient.Resources(namespace).Delete(context.Background(), name, metav1.DeleteOptions{}) if err != nil { return err } @@ -258,7 +258,7 @@ func waitForMatchingFederatedObject(tl common.TestLogger, typeConfig typeconfig. 
expected64 := int32MapToInt64(expected32) return wait.PollImmediate(framework.PollInterval, framework.TestContext.SingleCallTimeout, func() (bool, error) { - fedObject, err := client.Resources(namespace).Get(name, metav1.GetOptions{}) + fedObject, err := client.Resources(namespace).Get(context.Background(), name, metav1.GetOptions{}) if err != nil { if !apierrors.IsNotFound(err) { tl.Errorf("An error occurred while polling for %s %s/%s: %v", kind, namespace, name, err) diff --git a/test/e2e/servicedns.go b/test/e2e/servicedns.go index 8dd0c7c073..0da939772d 100644 --- a/test/e2e/servicedns.go +++ b/test/e2e/servicedns.go @@ -195,7 +195,7 @@ func createClusterServiceAndEndpoints(f framework.KubeFedFramework, name, namesp common.WaitForNamespaceOrDie(framework.NewE2ELogger(), client, clusterName, namespace, framework.PollInterval, framework.TestContext.SingleCallTimeout) - createdService, err := client.CoreV1().Services(namespace).Create(service) + createdService, err := client.CoreV1().Services(namespace).Create(context.Background(), service, metav1.CreateOptions{}) framework.ExpectNoError(err, "Error creating service in cluster %q", clusterName) createdService.Status = apiv1.ServiceStatus{ @@ -203,11 +203,11 @@ func createClusterServiceAndEndpoints(f framework.KubeFedFramework, name, namesp } // Fake out provisioning LoadBalancer by updating the service status in member cluster. - _, err = client.CoreV1().Services(namespace).UpdateStatus(createdService) + _, err = client.CoreV1().Services(namespace).UpdateStatus(context.Background(), createdService, metav1.UpdateOptions{}) framework.ExpectNoError(err, "Error updating service status in cluster %q", clusterName) // Fake out pods backing service by creating endpoint in member cluster. - _, err = client.CoreV1().Endpoints(namespace).Create(endpoint) + _, err = client.CoreV1().Endpoints(namespace).Create(context.Background(), endpoint, metav1.CreateOptions{}) framework.ExpectNoError(err, "Error creating endpoint in cluster %q", clusterName) } diff --git a/test/e2e/version.go b/test/e2e/version.go index 1c7b08c19d..cc58b8df22 100644 --- a/test/e2e/version.go +++ b/test/e2e/version.go @@ -67,15 +67,15 @@ type testNamespacedVersionAdapter struct { func (a *testNamespacedVersionAdapter) CreateFederatedObject(obj pkgruntime.Object) (pkgruntime.Object, error) { configMap := obj.(*corev1.ConfigMap) - return a.kubeClient.CoreV1().ConfigMaps(configMap.Namespace).Create(configMap) + return a.kubeClient.CoreV1().ConfigMaps(configMap.Namespace).Create(context.Background(), configMap, metav1.CreateOptions{}) } func (a *testNamespacedVersionAdapter) DeleteFederatedObject(qualifiedName util.QualifiedName) error { - return a.kubeClient.CoreV1().ConfigMaps(qualifiedName.Namespace).Delete(qualifiedName.Name, nil) + return a.kubeClient.CoreV1().ConfigMaps(qualifiedName.Namespace).Delete(context.Background(), qualifiedName.Name, metav1.DeleteOptions{}) } func (a *testNamespacedVersionAdapter) GetFederatedObject(qualifiedName util.QualifiedName) (pkgruntime.Object, error) { - return a.kubeClient.CoreV1().ConfigMaps(qualifiedName.Namespace).Get(qualifiedName.Name, metav1.GetOptions{}) + return a.kubeClient.CoreV1().ConfigMaps(qualifiedName.Namespace).Get(context.Background(), qualifiedName.Name, metav1.GetOptions{}) } func (a *testNamespacedVersionAdapter) FederatedType() string { @@ -104,15 +104,15 @@ type testClusterVersionAdapter struct { func (a *testClusterVersionAdapter) CreateFederatedObject(obj pkgruntime.Object) (pkgruntime.Object, error) { role := 
obj.(*rbacv1.ClusterRole) - return a.kubeClient.RbacV1().ClusterRoles().Create(role) + return a.kubeClient.RbacV1().ClusterRoles().Create(context.Background(), role, metav1.CreateOptions{}) } func (a *testClusterVersionAdapter) DeleteFederatedObject(qualifiedName util.QualifiedName) error { - return a.kubeClient.RbacV1().ClusterRoles().Delete(qualifiedName.String(), nil) + return a.kubeClient.RbacV1().ClusterRoles().Delete(context.Background(), qualifiedName.String(), metav1.DeleteOptions{}) } func (a *testClusterVersionAdapter) GetFederatedObject(qualifiedName util.QualifiedName) (pkgruntime.Object, error) { - return a.kubeClient.RbacV1().ClusterRoles().Get(qualifiedName.String(), metav1.GetOptions{}) + return a.kubeClient.RbacV1().ClusterRoles().Get(context.Background(), qualifiedName.String(), metav1.GetOptions{}) } func (a *testClusterVersionAdapter) FederatedType() string { From d23af2d306b22abf797cdd0cc86a8aad9a1be1ae Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 16:50:30 +0100 Subject: [PATCH 04/14] chore: Upgrade ginkgo and gomega dependencies --- go.mod | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index 1fd8ee95be..1374ce569a 100644 --- a/go.mod +++ b/go.mod @@ -6,8 +6,8 @@ require ( github.com/evanphx/json-patch v4.5.0+incompatible github.com/ghodss/yaml v1.0.0 github.com/json-iterator/go v1.1.9 - github.com/onsi/ginkgo v1.12.0 - github.com/onsi/gomega v1.9.0 + github.com/onsi/ginkgo v1.13.0 + github.com/onsi/gomega v1.10.1 github.com/openshift/generic-admission-server v1.14.0 github.com/pborman/uuid v1.2.0 github.com/pkg/errors v0.9.1 From 40bfd07edb011624290624fbb4873cf5d1abd0ab Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 16:50:44 +0100 Subject: [PATCH 05/14] chore: Upgrade kubebuilder dependency --- scripts/download-binaries.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/download-binaries.sh b/scripts/download-binaries.sh index 6eb7d07391..aaf1f83c0a 100755 --- a/scripts/download-binaries.sh +++ b/scripts/download-binaries.sh @@ -43,7 +43,7 @@ mkdir -p "${dest_dir}" platform=$(uname -s|tr A-Z a-z) -kb_version="2.0.0" +kb_version="2.3.1" kb_tgz="kubebuilder_${kb_version}_${platform}_amd64.tar.gz" kb_url="https://github.com/kubernetes-sigs/kubebuilder/releases/download/v${kb_version}/${kb_tgz}" curl "${curl_args}O" "${kb_url}" \ From 1913276d0c385fd9b03f32f5cfd9c5e424fadf45 Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 16:51:01 +0100 Subject: [PATCH 06/14] chore: Upgrade helm dependency --- scripts/download-binaries.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/download-binaries.sh b/scripts/download-binaries.sh index aaf1f83c0a..4a23365da2 100755 --- a/scripts/download-binaries.sh +++ b/scripts/download-binaries.sh @@ -53,7 +53,7 @@ curl "${curl_args}O" "${kb_url}" \ export KUBEBUILDER_ASSETS="${dest_dir}" echo "Setting to KUBEBUILDER_ASSETS ${dest_dir}" -helm_version="2.16.3" +helm_version="2.16.9" helm_tgz="helm-v${helm_version}-${platform}-amd64.tar.gz" helm_url="https://storage.googleapis.com/kubernetes-helm/$helm_tgz" curl "${curl_args}O" "${helm_url}" \ From efefd261c4a3ad7be5367c7d1ef83dc87edf5486 Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 16:51:11 +0100 Subject: [PATCH 07/14] chore: Upgrade golangci-lint dependency --- scripts/download-binaries.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/download-binaries.sh b/scripts/download-binaries.sh index 
4a23365da2..e4f21cab4a 100755 --- a/scripts/download-binaries.sh +++ b/scripts/download-binaries.sh @@ -62,7 +62,7 @@ curl "${curl_args}O" "${helm_url}" \ # TODO(marun) Update to newer version of golangci-lint when # https://github.com/golangci/golangci-lint/issues/483 is fixed. -golint_version="1.23.6" +golint_version="1.27.0" golint_dir="golangci-lint-${golint_version}-${platform}-amd64" golint_tgz="${golint_dir}.tar.gz" golint_url="https://github.com/golangci/golangci-lint/releases/download/v${golint_version}/${golint_tgz}" From 02a0e9724cba080cf4758ee1191a8df10ea9dbf5 Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 16:51:53 +0100 Subject: [PATCH 08/14] chore: Upgrade go-bindata dependency --- scripts/download-binaries.sh | 4 +- test/common/bindata.go | 7 +- tools/go.mod | 4 +- tools/go.sum | 162 +++++++++++------------------------ 4 files changed, 57 insertions(+), 120 deletions(-) diff --git a/scripts/download-binaries.sh b/scripts/download-binaries.sh index e4f21cab4a..a0aea74053 100755 --- a/scripts/download-binaries.sh +++ b/scripts/download-binaries.sh @@ -73,9 +73,9 @@ curl "${curl_args}O" "${golint_url}" \ # Install go-bindata tool GOBIN="$(go env GOPATH)/bin" pushd ${root_dir}/tools -go install github.com/go-bindata/go-bindata/go-bindata +go install github.com/go-bindata/go-bindata/v3/go-bindata popd -ln -s ${GOBIN}/go-bindata ${dest_dir}/go-bindata +ln -sf ${GOBIN}/go-bindata ${dest_dir}/go-bindata echo "# destination:" echo "# ${dest_dir}" diff --git a/test/common/bindata.go b/test/common/bindata.go index e13928f2f5..41fd461ae8 100644 --- a/test/common/bindata.go +++ b/test/common/bindata.go @@ -14,8 +14,9 @@ See the License for the specific language governing permissions and limitations under the License. */ -// Code generated for package common by go-bindata DO NOT EDIT. (@generated) -// sources: +// Code generated by go-bindata. (@generated) DO NOT EDIT. 
+ + //Package common generated by go-bindata.// sources: // test/common/fixtures/clusterroles.rbac.authorization.k8s.io.yaml // test/common/fixtures/configmaps.yaml // test/common/fixtures/deployments.apps.yaml @@ -74,7 +75,7 @@ func (fi bindataFileInfo) Mode() os.FileMode { return fi.mode } -// Mode return file modify time +// ModTime return file modify time func (fi bindataFileInfo) ModTime() time.Time { return fi.modTime } diff --git a/tools/go.mod b/tools/go.mod index a754a3f5f6..7a850c125e 100644 --- a/tools/go.mod +++ b/tools/go.mod @@ -3,6 +3,6 @@ module github.com/mesosphere/kommander-cluster-lifecycle/tools go 1.13 require ( - github.com/go-bindata/go-bindata v3.1.2+incompatible - sigs.k8s.io/controller-tools v0.2.5 + github.com/go-bindata/go-bindata/v3 v3.1.3 + sigs.k8s.io/controller-tools v0.3.0 ) diff --git a/tools/go.sum b/tools/go.sum index 04f6a95323..a4eab05225 100644 --- a/tools/go.sum +++ b/tools/go.sum @@ -10,9 +10,6 @@ github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxB github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= @@ -30,11 +27,10 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/blang/semver v3.5.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= -github.com/coreos/bbolt v1.3.1-coreos.6/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/etcd v3.3.15+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -45,7 +41,6 @@ github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfc github.com/coreos/pkg v0.0.0-20180108230652-97fdf19511ea/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/creack/pty 
v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= @@ -55,9 +50,11 @@ github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= +github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= @@ -66,8 +63,8 @@ github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2H github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= -github.com/go-bindata/go-bindata v3.1.2+incompatible h1:5vjJMVhowQdPzjE1LdxyFF7YFTXg5IgGVW4gBr5IbvE= -github.com/go-bindata/go-bindata v3.1.2+incompatible/go.mod h1:xK8Dsgwmeed+BBsSy2XTopBn/8uK2HWuGSnA11C3Joo= +github.com/go-bindata/go-bindata/v3 v3.1.3 h1:F0nVttLC3ws0ojc7p60veTurcOm//D4QBODNM7EGrCI= +github.com/go-bindata/go-bindata/v3 v3.1.3/go.mod h1:1/zrpXsLD8YDIbhZRqXzm1Ghc7NhEvIN9+Z6R5/xH4I= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= @@ -115,18 +112,12 @@ github.com/go-openapi/validate v0.18.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+ github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2KDnRCRMUi7GTA= github.com/go-openapi/validate v0.19.5/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85nY1c2z52x1Gk4= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= -github.com/gobuffalo/envy v1.7.1/go.mod h1:FurDp9+EDPE4aIUS3ZLyD+7/9fpx7YRt/ukY6jIHf0w= -github.com/gobuffalo/flect v0.1.5/go.mod 
h1:W3K3X9ksuZfir8f/LrfVtWmCDQFfayuylOJ7sz/Fj80= github.com/gobuffalo/flect v0.2.0 h1:EWCvMGGxOjsgwlWaP+f4+Hh6yrrte7JeFL2S6b+0hdM= github.com/gobuffalo/flect v0.2.0/go.mod h1:W3K3X9ksuZfir8f/LrfVtWmCDQFfayuylOJ7sz/Fj80= -github.com/gobuffalo/logger v1.0.1/go.mod h1:2zbswyIUa45I+c+FLXuWl9zSWEiVuthsk8ze5s8JvPs= -github.com/gobuffalo/packd v0.3.0/go.mod h1:zC7QkmNkYVGKPw4tHpBQ+ml7W/3tIebgeo1b36chA3Q= -github.com/gobuffalo/packr/v2 v2.7.1/go.mod h1:qYEvAazPaVxy7Y7KR0W8qYEE+RymX74kETFqjFoFlOc= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d h1:3PaI8p3seN09VjbTYC/QWlUZdZ1qS1zGjy7LH2Wt07I= -github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= @@ -139,52 +130,40 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= -github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= +github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= +github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/gregjones/httpcache v0.0.0-20170728041850-787624de3eb7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/grpc-ecosystem/go-grpc-middleware v0.0.0-20190222133341-cfaf5686ec79/go.mod 
h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.3.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-retryablehttp v0.6.4/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/helm/chart-testing v2.4.0+incompatible/go.mod h1:wiZWpIbHj0WZlK79WZJt19Kt/L2txG2+haKcDZoyXuc= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.8 h1:QiWkFLKq0T7mpzwOTu6BzNDbfTE8OLrYhVKYMLF46Ok= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.2-0.20191008195320-984a47ca6b0a/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -208,9 +187,10 @@ 
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180320133207-05fbef0ca5da/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= @@ -219,41 +199,31 @@ github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+ github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= -github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= 
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= -github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.3.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.4.0/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/soheilhy/cmux v0.1.3/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= @@ -271,7 +241,6 @@ github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DM github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -280,7 +249,6 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= -github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod 
h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= @@ -289,11 +257,8 @@ go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qL go.mongodb.org/mongo-driver v1.1.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.mongodb.org/mongo-driver v1.1.2/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.uber.org/atomic v0.0.0-20181018215023-8dc6146f7569/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/multierr v0.0.0-20180122172545-ddea229ff1df/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/zap v0.0.0-20180814183419-67bc79d13d15/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -302,24 +267,19 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190617133340-57b3e21c3d56/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190312203227-4b39c73a6495/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -328,7 +288,6 @@ golang.org/x/net v0.0.0-20190320064053-1272bf9dcd53/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190812203447-cdfb69ac37fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191004110552-13f9640d40b9 h1:rjwSpXsdiK0dV8/Naq3kAw9ymfAeJIyd0upUIElB+lI= @@ -352,14 +311,13 @@ golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190321052220-f7bb7a8bee54/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190515120540-06a5c4944438/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7 h1:HmbHVPwrPEKPGLAcHSrMe6+hqSUlvZU0rab6x5EXfGU= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -374,23 +332,16 @@ golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools 
v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72 h1:bw9doJza/SFBEweII/rHQh338oozWyiFsBRHtrflcws= golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191004055002-72853e10c5a3/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191213032237-7093a17b0467/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f h1:kDxGY2VmgABOe55qheT/TFqUMtcTHnomIPS1iv3G4Ms= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0= -gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= -gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -398,17 +349,16 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.1/go.mod 
h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inf.v0 v0.9.0/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= @@ -418,54 +368,40 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWD gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20190905181640-827449938966 h1:B0J02caTR6tpSJozBJyiAzT6CtBzjclw4pgm9gg8Ys0= gopkg.in/yaml.v3 v3.0.0-20190905181640-827449938966/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -k8s.io/api v0.0.0-20190918155943-95b840bb6a1f/go.mod h1:uWuOHnjmNrtQomJrvEBg0c0HRNyQ+8KTEERVsK0PW48= -k8s.io/api v0.17.0 h1:H9d/lw+VkZKEVIUc8F3wgiQ+FUXTTr21M87jXLU7yqM= -k8s.io/api v0.17.0/go.mod h1:npsyOePkeP0CPwyGfXDHxvypiYMJxBWAMpQxCaJ4ZxI= -k8s.io/apiextensions-apiserver v0.0.0-20190918161926-8f644eb6e783/go.mod h1:xvae1SZB3E17UpV59AWc271W/Ph25N+bjPyR63X6tPY= -k8s.io/apiextensions-apiserver v0.17.0 h1:+XgcGxqaMztkbbvsORgCmHIb4uImHKvTjNyu7b8gRnA= -k8s.io/apiextensions-apiserver v0.17.0/go.mod h1:XiIFUakZywkUl54fVXa7QTEHcqQz9HG55nHd1DCoHj8= -k8s.io/apimachinery v0.0.0-20190913080033-27d36303b655/go.mod h1:nL6pwRT8NgfF8TT68DBI8uEePRt89cSvoXUVqbkWHq4= -k8s.io/apimachinery v0.17.0 h1:xRBnuie9rXcPxUkDizUsGvPf1cnlZCFu210op7J7LJo= -k8s.io/apimachinery v0.17.0/go.mod h1:b9qmWdKlLuU9EBh+06BtLcSf/Mu89rWL33naRxs1uZg= -k8s.io/apiserver v0.0.0-20190918160949-bfa5e2e684ad/go.mod h1:XPCXEwhjaFN29a8NldXA901ElnKeKLrLtREO9ZhFyhg= -k8s.io/apiserver v0.17.0/go.mod h1:ABM+9x/prjINN6iiffRVNCBR2Wk7uY4z+EtEGZD48cg= -k8s.io/client-go v0.0.0-20190918160344-1fbdaa4c8d90/go.mod h1:J69/JveO6XESwVgG53q3Uz5OSfgsv4uxpScmmyYOOlk= -k8s.io/client-go v0.17.0/go.mod h1:TYgR6EUHs6k45hb6KWjVD6jFZvJV4gHDikv/It0xz+k= -k8s.io/code-generator 
v0.0.0-20190912054826-cd179ad6a269/go.mod h1:V5BD6M4CyaN5m+VthcclXWsVcT1Hu+glwa1bi3MIsyE= -k8s.io/code-generator v0.17.0/go.mod h1:DVmfPQgxQENqDIzVR2ddLXMH34qeszkKSdH/N+s+38s= -k8s.io/component-base v0.0.0-20190918160511-547f6c5d7090/go.mod h1:933PBGtQFJky3TEwYx4aEPZ4IxqhWh3R6DCmzqIn1hA= -k8s.io/component-base v0.17.0/go.mod h1:rKuRAokNMY2nn2A6LP/MiwpoaMRHpfRnrPaUJJj1Yoc= +k8s.io/api v0.18.2 h1:wG5g5ZmSVgm5B+eHMIbI9EGATS2L8Z72rda19RIEgY8= +k8s.io/api v0.18.2/go.mod h1:SJCWI7OLzhZSvbY7U8zwNl9UA4o1fizoug34OV/2r78= +k8s.io/apiextensions-apiserver v0.18.2 h1:I4v3/jAuQC+89L3Z7dDgAiN4EOjN6sbm6iBqQwHTah8= +k8s.io/apiextensions-apiserver v0.18.2/go.mod h1:q3faSnRGmYimiocj6cHQ1I3WpLqmDgJFlKL37fC4ZvY= +k8s.io/apimachinery v0.18.2 h1:44CmtbmkzVDAhCpRVSiP2R5PPrC2RtlIv/MoB8xpdRA= +k8s.io/apimachinery v0.18.2/go.mod h1:9SnR/e11v5IbyPCGbvJViimtJ0SwHG4nfZFjU77ftcA= +k8s.io/apiserver v0.18.2/go.mod h1:Xbh066NqrZO8cbsoenCwyDJ1OSi8Ag8I2lezeHxzwzw= +k8s.io/client-go v0.18.2/go.mod h1:Xcm5wVGXX9HAA2JJ2sSBUn3tCJ+4SVlCbl2MNNv+CIU= +k8s.io/code-generator v0.18.2/go.mod h1:+UHX5rSbxmR8kzS+FAv7um6dtYrZokQvjHpDSYRVkTc= +k8s.io/component-base v0.18.2/go.mod h1:kqLlMuhJNHQ9lz8Z7V5bxUUtjFZnrypArGl58gmDfUM= k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/gengo v0.0.0-20190822140433-26a664648505/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= +k8s.io/gengo v0.0.0-20200114144118-36b2048a9120/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v0.4.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= -k8s.io/kube-openapi v0.0.0-20190816220812-743ec37842bf/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E= -k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E= -k8s.io/utils v0.0.0-20190801114015-581e00157fb1/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= -k8s.io/utils v0.0.0-20191114184206-e782cd3c129f h1:GiPwtSzdP43eI1hpPCbROQCCIgCuiMMNF8YUVLF3vJo= -k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= -modernc.org/cc v1.0.0/go.mod h1:1Sk4//wdnYJiUIxnW8ddKpaOJCF37yAdqYnkxUpaYxw= -modernc.org/golex v1.0.0/go.mod h1:b/QX9oBD/LhixY6NDh+IdGv17hgB+51fET1i2kPSmvk= -modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03k= -modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs= -modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I= -mvdan.cc/sh v2.6.4+incompatible/go.mod h1:IeeQbZq+x2SUGBensq/jge5lLQbS3XT2ktyp3wrt4x8= -sigs.k8s.io/controller-tools v0.2.4/go.mod h1:m/ztfQNocGYBgTTCmFdnK94uVvgxeZeE3LtJvd/jIzA= -sigs.k8s.io/controller-tools v0.2.5 h1:kH7HKWed9XO42OTxyhUtqyImiefdZV2Q9Jbrytvhf18= -sigs.k8s.io/controller-tools v0.2.5/go.mod h1:+t0Hz6tOhJQCdd7IYO0mNzimmiM9sqMU0021u6UCF2o= -sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI= -sigs.k8s.io/structured-merge-diff v0.0.0-20190817042607-6149e4549fca/go.mod h1:IIgPezJWb76P0hotTxzDbWsMYB8APh18qZnxkomBpxA= -sigs.k8s.io/structured-merge-diff v1.0.1-0.20191108220359-b1b620dd3f06/go.mod 
h1:/ULNhyfzRopfcjskuui0cTITekDduZ7ycKN3oUT9R18= -sigs.k8s.io/yaml v1.1.0 h1:4A07+ZFc2wgJwo8YNlQpr1rVlgUDlxXHhPJciaPY5gs= +k8s.io/kube-openapi v0.0.0-20200121204235-bf4fb3bd569c/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E= +k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89 h1:d4vVOjXm687F1iLSP2q3lyPPuyvTUt3aVoBpi2DqRsU= +k8s.io/utils v0.0.0-20200324210504-a9aa75ae1b89/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= +sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.7/go.mod h1:PHgbrJT7lCHcxMU+mDHEm+nx46H4zuuHZkDP6icnhu0= +sigs.k8s.io/controller-tools v0.3.0 h1:y3YD99XOyWaXkiF1kd41uRvfp/64teWcrEZFuHxPhJ4= +sigs.k8s.io/controller-tools v0.3.0/go.mod h1:enhtKGfxZD1GFEoMgP8Fdbu+uKQ/cq1/WGJhdVChfvI= +sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= +sigs.k8s.io/structured-merge-diff/v3 v3.0.0 h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E= +sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= +sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= +sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= From 0e9810a824e81d48e414b795d6435b1acd06fda8 Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 17:49:46 +0100 Subject: [PATCH 09/14] chore: Update repo-infra verify_boilerplate.py --- scripts/pre-commit.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pre-commit.sh b/scripts/pre-commit.sh index 0197a7eb63..18cbb4d8a4 100755 --- a/scripts/pre-commit.sh +++ b/scripts/pre-commit.sh @@ -124,7 +124,7 @@ echo "Verifying Gofmt" ./hack/go-tools/verify-gofmt.sh echo "Checking boilerplate text" -./vendor/repo-infra/verify/verify-boilerplate.sh --rootdir="${ROOT_DIR}" -v +./vendor/repo-infra/hack/verify_boilerplate.py --rootdir="${ROOT_DIR}" echo "Linting" golangci-lint run --timeout=5m From c7aeb7e73892d25a8a0ffd29193dea70054b992f Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 17:55:14 +0100 Subject: [PATCH 10/14] chore: Fix golangci-lint issues --- pkg/controller/kubefedcluster/clusterclient.go | 2 +- pkg/controller/kubefedcluster/controller.go | 3 +-- pkg/controller/schedulingpreference/controller.go | 4 ++-- pkg/controller/sync/version/manager.go | 5 +---- scripts/download-binaries.sh | 2 -- 5 files changed, 5 insertions(+), 11 deletions(-) diff --git a/pkg/controller/kubefedcluster/clusterclient.go b/pkg/controller/kubefedcluster/clusterclient.go index 3fc98deeb6..7fcac91995 100644 --- a/pkg/controller/kubefedcluster/clusterclient.go +++ b/pkg/controller/kubefedcluster/clusterclient.go @@ -71,7 +71,7 @@ func NewClusterClientSet(c *fedv1b1.KubeFedCluster, client generic.Client, fedNa if err != nil { return nil, err } - clusterConfig.Timeout = timeout + clusterConfig.Timeout = timeout //nolint:staticcheck var clusterClientSet = ClusterClient{clusterName: c.Name} if clusterConfig != nil { clusterClientSet.kubeClient = kubeclientset.NewForConfigOrDie((restclient.AddUserAgent(clusterConfig, UserAgentName))) diff --git a/pkg/controller/kubefedcluster/controller.go b/pkg/controller/kubefedcluster/controller.go index e61c42ccbb..9a56ffe9fd 100644 --- a/pkg/controller/kubefedcluster/controller.go +++ b/pkg/controller/kubefedcluster/controller.go @@ -18,7 +18,6 @@ package kubefedcluster import ( "context" - "fmt" "sync" "time" @@ -111,7 +110,7 @@ func newClusterController(config 
*util.ControllerConfig, clusterHealthCheckConfi kubeClient := kubeclient.NewForConfigOrDie(kubeConfig) broadcaster := record.NewBroadcaster() broadcaster.StartRecordingToSink(&typedcorev1.EventSinkImpl{Interface: kubeClient.CoreV1().Events("")}) - recorder := broadcaster.NewRecorder(genscheme.Scheme, corev1.EventSource{Component: fmt.Sprintf("kubefedcluster-controller")}) + recorder := broadcaster.NewRecorder(genscheme.Scheme, corev1.EventSource{Component: "kubefedcluster-controller"}) cc.eventRecorder = recorder var err error diff --git a/pkg/controller/schedulingpreference/controller.go b/pkg/controller/schedulingpreference/controller.go index 820870b257..a8a5675e7e 100644 --- a/pkg/controller/schedulingpreference/controller.go +++ b/pkg/controller/schedulingpreference/controller.go @@ -78,7 +78,7 @@ func StartSchedulingPreferenceController(config *util.ControllerConfig, scheduli if config.MinimizeLatency { controller.minimizeLatency() } - klog.Infof(fmt.Sprintf("Starting replicaschedulingpreferences controller")) + klog.Infof("Starting replicaschedulingpreferences controller") controller.Run(stopChannel) return controller.scheduler, nil } @@ -95,7 +95,7 @@ func newSchedulingPreferenceController(config *util.ControllerConfig, scheduling broadcaster := record.NewBroadcaster() broadcaster.StartRecordingToSink(&typedcorev1.EventSinkImpl{Interface: kubeClient.CoreV1().Events("")}) - recorder := broadcaster.NewRecorder(scheme.Scheme, corev1.EventSource{Component: fmt.Sprintf("replicaschedulingpreference-controller")}) + recorder := broadcaster.NewRecorder(scheme.Scheme, corev1.EventSource{Component: "replicaschedulingpreference-controller"}) s := &SchedulingPreferenceController{ clusterAvailableDelay: config.ClusterAvailableDelay, diff --git a/pkg/controller/sync/version/manager.go b/pkg/controller/sync/version/manager.go index bef5f37483..595953b760 100644 --- a/pkg/controller/sync/version/manager.go +++ b/pkg/controller/sync/version/manager.go @@ -88,10 +88,7 @@ func (m *VersionManager) Sync(stopChan <-chan struct{}) { if !ok { return } - ok = m.load(versionList, stopChan) - if !ok { - return - } + m.load(versionList, stopChan) } // HasSynced indicates whether the manager's in-memory state has been diff --git a/scripts/download-binaries.sh b/scripts/download-binaries.sh index a0aea74053..765309ab1f 100755 --- a/scripts/download-binaries.sh +++ b/scripts/download-binaries.sh @@ -60,8 +60,6 @@ curl "${curl_args}O" "${helm_url}" \ && tar xzfp "${helm_tgz}" -C "${dest_dir}" --strip-components=1 "${platform}-amd64/helm" \ && rm "${helm_tgz}" -# TODO(marun) Update to newer version of golangci-lint when -# https://github.com/golangci/golangci-lint/issues/483 is fixed. golint_version="1.27.0" golint_dir="golangci-lint-${golint_version}-${platform}-amd64" golint_tgz="${golint_dir}.tar.gz" From c425e3942067603fc6bf8f29a89f031a165dfa3d Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Thu, 2 Jul 2020 20:30:48 +0100 Subject: [PATCH 11/14] fix: Do not federate endpointslices in tests --- scripts/deploy-federated-nginx.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/deploy-federated-nginx.sh b/scripts/deploy-federated-nginx.sh index 2d075055eb..137941e816 100755 --- a/scripts/deploy-federated-nginx.sh +++ b/scripts/deploy-federated-nginx.sh @@ -47,7 +47,7 @@ kubectl apply -n ${TEST_NS} -f example/sample1/service.yaml # Federate kubernetes resources to member clusters echo echo "Federating resources in namespace ${TEST_NS} to member clusters." 
-kubefedctl federate ns ${TEST_NS} --contents --skip-api-resources 'pods,secrets,serviceaccount,replicaset' +kubefedctl federate ns ${TEST_NS} --contents --skip-api-resources 'pods,secrets,serviceaccounts,replicasets,endpointslices' echo echo "Checking status of federated resources." From 99be227150e101f3b9588f3f88241f9bd32c012e Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Fri, 3 Jul 2020 10:59:11 +0100 Subject: [PATCH 12/14] chore: Remove repo-infra to relocate subtree --- vendor/repo-infra/.bazelrc | 4 - vendor/repo-infra/.gitignore | 7 - vendor/repo-infra/.kazelcfg.json | 3 - vendor/repo-infra/.travis.yml | 29 - vendor/repo-infra/BUILD.bazel | 8 - vendor/repo-infra/CONTRIBUTING.md | 9 - vendor/repo-infra/Gopkg.lock | 69 - vendor/repo-infra/Gopkg.toml | 30 - vendor/repo-infra/LICENSE | 201 --- vendor/repo-infra/OWNERS | 4 - vendor/repo-infra/README.md | 65 - vendor/repo-infra/SECURITY_CONTACTS | 14 - vendor/repo-infra/WORKSPACE | 21 - vendor/repo-infra/code-of-conduct.md | 3 - vendor/repo-infra/defs/BUILD.bazel | 43 - vendor/repo-infra/defs/build.bzl | 192 --- vendor/repo-infra/defs/deb.bzl | 62 - vendor/repo-infra/defs/diff_test.sh | 29 - vendor/repo-infra/defs/gcs_uploader.py | 92 - vendor/repo-infra/defs/go.bzl | 145 -- vendor/repo-infra/defs/pkg.bzl | 29 - vendor/repo-infra/defs/rpm.bzl | 41 - vendor/repo-infra/defs/run_in_workspace.bzl | 90 - vendor/repo-infra/defs/testdata/testfile.txt | 1 - .../defs/testdata/testfile.txt.md5.expected | 1 - .../defs/testdata/testfile.txt.sha1.expected | 1 - .../testdata/testfile.txt.sha512.expected | 1 - vendor/repo-infra/kazel/BUILD.bazel | 41 - vendor/repo-infra/kazel/README.rst | 90 - vendor/repo-infra/kazel/config.go | 70 - vendor/repo-infra/kazel/diff.go | 60 - vendor/repo-infra/kazel/generator.go | 178 -- vendor/repo-infra/kazel/generator_test.go | 122 -- vendor/repo-infra/kazel/kazel.go | 389 ----- vendor/repo-infra/kazel/kazel_test.go | 76 - vendor/repo-infra/kazel/sourcerer.go | 109 -- vendor/repo-infra/tools/BUILD.bazel | 65 - vendor/repo-infra/tools/CROSSTOOL | 512 ------ vendor/repo-infra/tools/build_tar/BUILD.bazel | 18 - vendor/repo-infra/tools/build_tar/buildtar.go | 611 ------- .../tools/generate_crosstool/BUILD.bazel | 26 - .../tools/generate_crosstool/main.go | 238 --- vendor/repo-infra/vendor/BUILD.bazel | 0 .../bazelbuild/bazel-gazelle/AUTHORS | 18 - .../bazelbuild/bazel-gazelle/CONTRIBUTORS | 29 - .../bazelbuild/bazel-gazelle/LICENSE | 202 --- .../bazel-gazelle/cmd/gazelle/BUILD.bazel | 39 - .../bazel-gazelle/cmd/gazelle/diff.go | 83 - .../bazel-gazelle/cmd/gazelle/fix-update.go | 449 ----- .../bazel-gazelle/cmd/gazelle/fix.go | 33 - .../bazel-gazelle/cmd/gazelle/gazelle.go | 120 -- .../bazel-gazelle/cmd/gazelle/langs.go | 27 - .../bazel-gazelle/cmd/gazelle/print.go | 29 - .../bazel-gazelle/cmd/gazelle/update-repos.go | 200 --- .../bazel-gazelle/cmd/gazelle/version.go | 65 - .../bazel-gazelle/config/BUILD.bazel | 16 - .../bazelbuild/bazel-gazelle/config/config.go | 210 --- .../bazel-gazelle/config/constants.go | 27 - .../bazelbuild/bazel-gazelle/flag/BUILD.bazel | 9 - .../bazelbuild/bazel-gazelle/flag/flag.go | 62 - .../internal/version/BUILD.bazel | 9 - .../bazel-gazelle/internal/version/version.go | 72 - .../bazel-gazelle/internal/wspace/BUILD.bazel | 9 - .../bazel-gazelle/internal/wspace/finder.go | 45 - .../bazel-gazelle/label/BUILD.bazel | 10 - .../bazelbuild/bazel-gazelle/label/label.go | 201 --- .../bazel-gazelle/language/BUILD.bazel | 14 - .../bazel-gazelle/language/go/BUILD.bazel | 34 - 
.../bazel-gazelle/language/go/config.go | 285 --- .../bazel-gazelle/language/go/constants.go | 44 - .../bazel-gazelle/language/go/fileinfo.go | 681 -------- .../bazel-gazelle/language/go/fix.go | 253 --- .../bazel-gazelle/language/go/generate.go | 576 ------- .../bazel-gazelle/language/go/kinds.go | 147 -- .../language/go/known_go_imports.go | 157 -- .../language/go/known_proto_imports.go | 365 ---- .../bazel-gazelle/language/go/lang.go | 70 - .../bazel-gazelle/language/go/package.go | 488 ------ .../bazel-gazelle/language/go/resolve.go | 335 ---- .../language/go/std_package_list.go | 284 --- .../bazelbuild/bazel-gazelle/language/lang.go | 151 -- .../bazel-gazelle/language/proto/BUILD.bazel | 28 - .../bazel-gazelle/language/proto/config.go | 256 --- .../bazel-gazelle/language/proto/constants.go | 27 - .../bazel-gazelle/language/proto/fileinfo.go | 138 -- .../bazel-gazelle/language/proto/fix.go | 24 - .../bazel-gazelle/language/proto/generate.go | 285 --- .../bazel-gazelle/language/proto/kinds.go | 29 - .../language/proto/known_imports.go | 366 ---- .../bazel-gazelle/language/proto/lang.go | 72 - .../bazel-gazelle/language/proto/package.go | 55 - .../bazel-gazelle/language/proto/resolve.go | 125 -- .../bazel-gazelle/merger/BUILD.bazel | 13 - .../bazelbuild/bazel-gazelle/merger/fix.go | 199 --- .../bazelbuild/bazel-gazelle/merger/merger.go | 250 --- .../bazel-gazelle/pathtools/BUILD.bazel | 9 - .../bazel-gazelle/pathtools/path.go | 67 - .../bazelbuild/bazel-gazelle/repo/BUILD.bazel | 21 - .../bazelbuild/bazel-gazelle/repo/dep.go | 55 - .../bazelbuild/bazel-gazelle/repo/modules.go | 145 -- .../bazelbuild/bazel-gazelle/repo/remote.go | 332 ---- .../bazelbuild/bazel-gazelle/repo/repo.go | 199 --- .../bazel-gazelle/resolve/BUILD.bazel | 18 - .../bazel-gazelle/resolve/config.go | 115 -- .../bazelbuild/bazel-gazelle/resolve/index.go | 243 --- .../bazelbuild/bazel-gazelle/rule/BUILD.bazel | 24 - .../bazel-gazelle/rule/directives.go | 64 - .../bazelbuild/bazel-gazelle/rule/expr.go | 354 ---- .../bazelbuild/bazel-gazelle/rule/merge.go | 489 ------ .../bazelbuild/bazel-gazelle/rule/platform.go | 128 -- .../bazel-gazelle/rule/platform_strings.go | 192 --- .../bazelbuild/bazel-gazelle/rule/rule.go | 692 -------- .../bazel-gazelle/rule/sort_labels.go | 114 -- .../bazelbuild/bazel-gazelle/rule/types.go | 56 - .../bazelbuild/bazel-gazelle/rule/value.go | 184 -- .../bazelbuild/bazel-gazelle/walk/BUILD.bazel | 18 - .../bazelbuild/bazel-gazelle/walk/config.go | 83 - .../bazelbuild/bazel-gazelle/walk/walk.go | 328 ---- .../bazelbuild/buildtools/CONTRIBUTORS | 15 - .../github.com/bazelbuild/buildtools/LICENSE | 13 - .../bazelbuild/buildtools/build/BUILD.bazel | 19 - .../bazelbuild/buildtools/build/lex.go | 870 ---------- .../bazelbuild/buildtools/build/parse.y.go | 1531 ----------------- .../bazelbuild/buildtools/build/print.go | 719 -------- .../bazelbuild/buildtools/build/quote.go | 262 --- .../bazelbuild/buildtools/build/rewrite.go | 817 --------- .../bazelbuild/buildtools/build/rule.go | 315 ---- .../bazelbuild/buildtools/build/syntax.go | 495 ------ .../bazelbuild/buildtools/build/walk.go | 135 -- .../bazelbuild/buildtools/tables/BUILD.bazel | 12 - .../buildtools/tables/jsonparser.go | 63 - .../bazelbuild/buildtools/tables/tables.go | 248 --- .../github.com/pelletier/go-toml/BUILD.bazel | 20 - .../github.com/pelletier/go-toml/LICENSE | 21 - .../github.com/pelletier/go-toml/doc.go | 23 - .../github.com/pelletier/go-toml/fuzz.go | 31 - .../pelletier/go-toml/keysparsing.go | 85 - 
.../github.com/pelletier/go-toml/lexer.go | 750 -------- .../github.com/pelletier/go-toml/marshal.go | 600 ------- .../github.com/pelletier/go-toml/parser.go | 430 ----- .../github.com/pelletier/go-toml/position.go | 29 - .../github.com/pelletier/go-toml/token.go | 144 -- .../github.com/pelletier/go-toml/toml.go | 309 ---- .../pelletier/go-toml/tomltree_create.go | 142 -- .../pelletier/go-toml/tomltree_write.go | 289 ---- .../github.com/pmezard/go-difflib/LICENSE | 27 - .../pmezard/go-difflib/difflib/BUILD.bazel | 9 - .../pmezard/go-difflib/difflib/difflib.go | 772 --------- .../vendor/golang.org/x/build/AUTHORS | 3 - .../vendor/golang.org/x/build/CONTRIBUTORS | 3 - .../vendor/golang.org/x/build/LICENSE | 27 - .../vendor/golang.org/x/build/PATENTS | 22 - .../golang.org/x/build/pargzip/BUILD.bazel | 9 - .../golang.org/x/build/pargzip/pargzip.go | 200 --- .../vendor/golang.org/x/tools/AUTHORS | 3 - .../vendor/golang.org/x/tools/CONTRIBUTORS | 3 - .../vendor/golang.org/x/tools/LICENSE | 27 - .../vendor/golang.org/x/tools/PATENTS | 22 - .../golang.org/x/tools/cmd/getgo/LICENSE | 27 - .../golang.org/x/tools/go/vcs/BUILD.bazel | 14 - .../golang.org/x/tools/go/vcs/discovery.go | 76 - .../vendor/golang.org/x/tools/go/vcs/env.go | 39 - .../vendor/golang.org/x/tools/go/vcs/http.go | 80 - .../vendor/golang.org/x/tools/go/vcs/vcs.go | 755 -------- .../x/tools/third_party/moduleloader/LICENSE | 22 - .../x/tools/third_party/typescript/LICENSE | 55 - .../x/tools/third_party/webcomponents/LICENSE | 27 - .../repo-infra/vendor/k8s.io/klog/BUILD.bazel | 12 - vendor/repo-infra/vendor/k8s.io/klog/LICENSE | 191 -- vendor/repo-infra/vendor/k8s.io/klog/klog.go | 1239 ------------- .../vendor/k8s.io/klog/klog_file.go | 126 -- vendor/repo-infra/verify/BUILD.bazel | 20 - vendor/repo-infra/verify/README.md | 52 - .../repo-infra/verify/boilerplate/BUILD.bazel | 3 - .../boilerplate/boilerplate.Dockerfile.txt | 14 - .../boilerplate/boilerplate.Makefile.txt | 14 - .../verify/boilerplate/boilerplate.bzl.txt | 14 - .../verify/boilerplate/boilerplate.go.txt | 16 - .../verify/boilerplate/boilerplate.py | 202 --- .../verify/boilerplate/boilerplate.py.txt | 14 - .../verify/boilerplate/boilerplate.sh.txt | 14 - .../verify/boilerplate/boilerplate_test.py | 52 - .../verify/boilerplate/test/BUILD.bazel | 17 - .../verify/boilerplate/test/fail.go | 19 - .../verify/boilerplate/test/fail.py | 17 - .../verify/boilerplate/test/pass.go | 17 - .../verify/boilerplate/test/pass.py | 17 - .../verify/go-tools/verify-gofmt.sh | 34 - .../verify/go-tools/verify-gometalinter.sh | 33 - .../verify/go-tools/verify-govet.sh | 20 - .../verify/go_install_from_commit.sh | 28 - vendor/repo-infra/verify/update-bazel.sh | 34 - vendor/repo-infra/verify/verify-bazel.sh | 44 - .../repo-infra/verify/verify-boilerplate.sh | 56 - vendor/repo-infra/verify/verify-crosstool.sh | 33 - vendor/repo-infra/verify/verify-errexit.sh | 48 - vendor/repo-infra/verify/verify-go-src.sh | 111 -- 197 files changed, 28564 deletions(-) delete mode 100644 vendor/repo-infra/.bazelrc delete mode 100644 vendor/repo-infra/.gitignore delete mode 100644 vendor/repo-infra/.kazelcfg.json delete mode 100644 vendor/repo-infra/.travis.yml delete mode 100644 vendor/repo-infra/BUILD.bazel delete mode 100644 vendor/repo-infra/CONTRIBUTING.md delete mode 100644 vendor/repo-infra/Gopkg.lock delete mode 100644 vendor/repo-infra/Gopkg.toml delete mode 100644 vendor/repo-infra/LICENSE delete mode 100644 vendor/repo-infra/OWNERS delete mode 100644 vendor/repo-infra/README.md delete mode 100644 
vendor/repo-infra/SECURITY_CONTACTS delete mode 100644 vendor/repo-infra/WORKSPACE delete mode 100644 vendor/repo-infra/code-of-conduct.md delete mode 100644 vendor/repo-infra/defs/BUILD.bazel delete mode 100644 vendor/repo-infra/defs/build.bzl delete mode 100644 vendor/repo-infra/defs/deb.bzl delete mode 100755 vendor/repo-infra/defs/diff_test.sh delete mode 100644 vendor/repo-infra/defs/gcs_uploader.py delete mode 100644 vendor/repo-infra/defs/go.bzl delete mode 100644 vendor/repo-infra/defs/pkg.bzl delete mode 100644 vendor/repo-infra/defs/rpm.bzl delete mode 100644 vendor/repo-infra/defs/run_in_workspace.bzl delete mode 100644 vendor/repo-infra/defs/testdata/testfile.txt delete mode 100644 vendor/repo-infra/defs/testdata/testfile.txt.md5.expected delete mode 100644 vendor/repo-infra/defs/testdata/testfile.txt.sha1.expected delete mode 100644 vendor/repo-infra/defs/testdata/testfile.txt.sha512.expected delete mode 100644 vendor/repo-infra/kazel/BUILD.bazel delete mode 100644 vendor/repo-infra/kazel/README.rst delete mode 100644 vendor/repo-infra/kazel/config.go delete mode 100644 vendor/repo-infra/kazel/diff.go delete mode 100644 vendor/repo-infra/kazel/generator.go delete mode 100644 vendor/repo-infra/kazel/generator_test.go delete mode 100644 vendor/repo-infra/kazel/kazel.go delete mode 100644 vendor/repo-infra/kazel/kazel_test.go delete mode 100644 vendor/repo-infra/kazel/sourcerer.go delete mode 100644 vendor/repo-infra/tools/BUILD.bazel delete mode 100755 vendor/repo-infra/tools/CROSSTOOL delete mode 100644 vendor/repo-infra/tools/build_tar/BUILD.bazel delete mode 100644 vendor/repo-infra/tools/build_tar/buildtar.go delete mode 100644 vendor/repo-infra/tools/generate_crosstool/BUILD.bazel delete mode 100644 vendor/repo-infra/tools/generate_crosstool/main.go delete mode 100644 vendor/repo-infra/vendor/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/AUTHORS delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/CONTRIBUTORS delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/LICENSE delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/diff.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix-update.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/gazelle.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/langs.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/print.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/update-repos.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/version.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/config.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/constants.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/flag.go delete mode 100644 
vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/version.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/finder.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/label.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/config.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/constants.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fileinfo.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fix.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/generate.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/kinds.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_go_imports.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_proto_imports.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/lang.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/package.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/resolve.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/std_package_list.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/lang.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/config.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/constants.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fileinfo.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fix.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/generate.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/kinds.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/known_imports.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/lang.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/package.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/resolve.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/fix.go delete mode 100644 
vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/merger.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/path.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/dep.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/modules.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/remote.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/repo.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/config.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/index.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/directives.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/expr.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/merge.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform_strings.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/rule.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/sort_labels.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/types.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/value.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/config.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/walk.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/LICENSE delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/lex.go delete mode 100755 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/parse.y.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/print.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/quote.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rewrite.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rule.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/syntax.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/walk.go delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go delete mode 100644 
vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/tables.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/LICENSE delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/doc.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/fuzz.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/keysparsing.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/lexer.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/marshal.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/parser.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/position.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/token.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/toml.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_create.go delete mode 100644 vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_write.go delete mode 100644 vendor/repo-infra/vendor/github.com/pmezard/go-difflib/LICENSE delete mode 100644 vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/difflib.go delete mode 100644 vendor/repo-infra/vendor/golang.org/x/build/AUTHORS delete mode 100644 vendor/repo-infra/vendor/golang.org/x/build/CONTRIBUTORS delete mode 100644 vendor/repo-infra/vendor/golang.org/x/build/LICENSE delete mode 100644 vendor/repo-infra/vendor/golang.org/x/build/PATENTS delete mode 100644 vendor/repo-infra/vendor/golang.org/x/build/pargzip/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/golang.org/x/build/pargzip/pargzip.go delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/AUTHORS delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/CONTRIBUTORS delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/LICENSE delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/PATENTS delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/cmd/getgo/LICENSE delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/discovery.go delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/env.go delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/http.go delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/vcs.go delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/third_party/typescript/LICENSE delete mode 100644 vendor/repo-infra/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE delete mode 100644 vendor/repo-infra/vendor/k8s.io/klog/BUILD.bazel delete mode 100644 vendor/repo-infra/vendor/k8s.io/klog/LICENSE delete mode 100644 vendor/repo-infra/vendor/k8s.io/klog/klog.go delete mode 100644 vendor/repo-infra/vendor/k8s.io/klog/klog_file.go delete mode 100644 vendor/repo-infra/verify/BUILD.bazel delete mode 100644 vendor/repo-infra/verify/README.md delete mode 100644 vendor/repo-infra/verify/boilerplate/BUILD.bazel delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate.Dockerfile.txt delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate.Makefile.txt 
delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate.bzl.txt delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate.go.txt delete mode 100755 vendor/repo-infra/verify/boilerplate/boilerplate.py delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate.py.txt delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate.sh.txt delete mode 100644 vendor/repo-infra/verify/boilerplate/boilerplate_test.py delete mode 100644 vendor/repo-infra/verify/boilerplate/test/BUILD.bazel delete mode 100644 vendor/repo-infra/verify/boilerplate/test/fail.go delete mode 100644 vendor/repo-infra/verify/boilerplate/test/fail.py delete mode 100644 vendor/repo-infra/verify/boilerplate/test/pass.go delete mode 100644 vendor/repo-infra/verify/boilerplate/test/pass.py delete mode 100755 vendor/repo-infra/verify/go-tools/verify-gofmt.sh delete mode 100755 vendor/repo-infra/verify/go-tools/verify-gometalinter.sh delete mode 100755 vendor/repo-infra/verify/go-tools/verify-govet.sh delete mode 100755 vendor/repo-infra/verify/go_install_from_commit.sh delete mode 100755 vendor/repo-infra/verify/update-bazel.sh delete mode 100755 vendor/repo-infra/verify/verify-bazel.sh delete mode 100755 vendor/repo-infra/verify/verify-boilerplate.sh delete mode 100755 vendor/repo-infra/verify/verify-crosstool.sh delete mode 100755 vendor/repo-infra/verify/verify-errexit.sh delete mode 100755 vendor/repo-infra/verify/verify-go-src.sh diff --git a/vendor/repo-infra/.bazelrc b/vendor/repo-infra/.bazelrc deleted file mode 100644 index 1fa3377da1..0000000000 --- a/vendor/repo-infra/.bazelrc +++ /dev/null @@ -1,4 +0,0 @@ -build --verbose_failures -test --test_output=errors - -build:ci --noshow_progress diff --git a/vendor/repo-infra/.gitignore b/vendor/repo-infra/.gitignore deleted file mode 100644 index d35f6b36c4..0000000000 --- a/vendor/repo-infra/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -/bazel-* -/_output diff --git a/vendor/repo-infra/.kazelcfg.json b/vendor/repo-infra/.kazelcfg.json deleted file mode 100644 index a8b53943e8..0000000000 --- a/vendor/repo-infra/.kazelcfg.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "GoPrefix": "k8s.io/repo-infra" -} diff --git a/vendor/repo-infra/.travis.yml b/vendor/repo-infra/.travis.yml deleted file mode 100644 index 78326b854b..0000000000 --- a/vendor/repo-infra/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -dist: trusty -sudo: required - -# Install latest Go and Bazel and set up GOPATH. -before_install: -# gimme on travis is too old for .x version resolution, and gimme stable seems broken, too -- LATEST_GO=$(gimme --known | sort -V | tail -1) -- eval "$(gimme ${LATEST_GO})" -- mkdir -p $GOPATH/src/k8s.io -- mv $TRAVIS_BUILD_DIR $GOPATH/src/k8s.io -- cd $GOPATH/src/k8s.io/repo-infra -- echo "deb [arch=amd64] https://storage.googleapis.com/bazel-apt stable jdk1.8" | sudo tee /etc/apt/sources.list.d/bazel.list -- curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - -- sudo apt-get update - -install: - - sudo apt-get install bazel - - go get -u github.com/alecthomas/gometalinter - - go get -u github.com/bazelbuild/buildtools/buildifier - - gometalinter --install - -script: - # Build first since we need the generated protobuf for the govet checks - - bazel build --config=ci //... 
- - ./verify/verify-boilerplate.sh --rootdir="$(pwd)" -v - - GOPATH="${GOPATH}:$(pwd)/bazel-bin/verify/verify-go-src-go_path" ./verify/verify-go-src.sh --rootdir "$(pwd)" -v - - ./verify/verify-bazel.sh - - buildifier -mode=check $(find . -name BUILD -o -name '*.bzl' -type f -not -wholename '*/vendor/*') - - bazel test --config=ci //... diff --git a/vendor/repo-infra/BUILD.bazel b/vendor/repo-infra/BUILD.bazel deleted file mode 100644 index 52b9051443..0000000000 --- a/vendor/repo-infra/BUILD.bazel +++ /dev/null @@ -1,8 +0,0 @@ -# gazelle:prefix k8s.io/repo-infra - -# Use the Bazel-vendored protobuf library since we use go_proto_library -# gazelle:resolve go github.com/golang/protobuf/proto @com_github_golang_protobuf//proto:go_default_library - -package(default_visibility = ["//visibility:public"]) - -licenses(["notice"]) diff --git a/vendor/repo-infra/CONTRIBUTING.md b/vendor/repo-infra/CONTRIBUTING.md deleted file mode 100644 index ef37eb0b61..0000000000 --- a/vendor/repo-infra/CONTRIBUTING.md +++ /dev/null @@ -1,9 +0,0 @@ -# Contributing - -Thanks for taking the time to join our community and start contributing! - -The [Contributor Guide](https://github.com/kubernetes/community/blob/master/contributors/guide/README.md) -provides detailed instructions on how to get your ideas and bug fixes seen and accepted. - -Please remember to sign the [CNCF CLA](https://github.com/kubernetes/community/blob/master/CLA.md) and -read and observe the [Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md). diff --git a/vendor/repo-infra/Gopkg.lock b/vendor/repo-infra/Gopkg.lock deleted file mode 100644 index ac802a6c93..0000000000 --- a/vendor/repo-infra/Gopkg.lock +++ /dev/null @@ -1,69 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
- - -[[projects]] - name = "github.com/bazelbuild/bazel-gazelle" - packages = [ - "cmd/gazelle", - "config", - "flag", - "internal/version", - "internal/wspace", - "label", - "language", - "language/go", - "language/proto", - "merger", - "pathtools", - "repo", - "resolve", - "rule", - "walk" - ] - revision = "cdeedbd624679ca4522da9670f2b3d02e9f8b84d" - version = "0.16.0" - -[[projects]] - name = "github.com/bazelbuild/buildtools" - packages = [ - "build", - "tables" - ] - revision = "80c7f0d45d7e40fa1f7362852697d4a03df557b3" - -[[projects]] - name = "github.com/pelletier/go-toml" - packages = ["."] - revision = "acdc4509485b587f5e675510c4f2c63e90ff68a8" - version = "v1.1.0" - -[[projects]] - name = "github.com/pmezard/go-difflib" - packages = ["difflib"] - revision = "792786c7400a136282c1664665ae0a8db921c6c2" - version = "v1.0.0" - -[[projects]] - branch = "master" - name = "golang.org/x/build" - packages = ["pargzip"] - revision = "125f04e1fc4b4cbfed95e5dd72a435fcb3847608" - -[[projects]] - branch = "master" - name = "golang.org/x/tools" - packages = ["go/vcs"] - revision = "77106db15f689a60e7d4e085d967ac557b918fb2" - -[[projects]] - branch = "master" - name = "k8s.io/klog" - packages = ["."] - revision = "b9b56d5dfc9208f60ea747056670942d8b0afdc8" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "09d2c0bcac0bf2579799808ceb710c97d4857c37ad06f4df039d5a3a81f0e431" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/vendor/repo-infra/Gopkg.toml b/vendor/repo-infra/Gopkg.toml deleted file mode 100644 index 0acce2783a..0000000000 --- a/vendor/repo-infra/Gopkg.toml +++ /dev/null @@ -1,30 +0,0 @@ -required = [ - "github.com/bazelbuild/bazel-gazelle/cmd/gazelle", -] - -[prune] - unused-packages = true - go-tests = true - non-go = true - -[[constraint]] - branch = "master" - name = "k8s.io/klog" - -[[constraint]] - branch = "master" - name = "golang.org/x/build" - - -# BEGIN gazelle dependencies -# Based on https://github.com/bazelbuild/bazel-gazelle/blob/0.16.0/go.mod - -[[constraint]] - name = "github.com/bazelbuild/bazel-gazelle" - version = "0.16.0" - -[[constraint]] - name = "github.com/bazelbuild/buildtools" - revision = "80c7f0d45d7e40fa1f7362852697d4a03df557b3" - -# END gazelle dependencies diff --git a/vendor/repo-infra/LICENSE b/vendor/repo-infra/LICENSE deleted file mode 100644 index 8dada3edaf..0000000000 --- a/vendor/repo-infra/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/repo-infra/OWNERS b/vendor/repo-infra/OWNERS deleted file mode 100644 index ed350c051e..0000000000 --- a/vendor/repo-infra/OWNERS +++ /dev/null @@ -1,4 +0,0 @@ -approvers: -- fejta -- ixdy -- mikedanese diff --git a/vendor/repo-infra/README.md b/vendor/repo-infra/README.md deleted file mode 100644 index 9bc8e1683f..0000000000 --- a/vendor/repo-infra/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# Kubernetes repository infrastructure -[![Build Status](https://travis-ci.org/kubernetes/repo-infra.svg?branch=master)](https://travis-ci.org/kubernetes/repo-infra) [![Go Report Card](https://goreportcard.com/badge/github.com/kubernetes/repo-infra)](https://goreportcard.com/report/github.com/kubernetes/repo-infra) - -This repository contains repository infrastructure tools for use in -`kubernetes` and `kubernetes-incubator` repositories. Examples: - -- Boilerplate verification -- Go source code quality verification -- Golang build infrastructure - ---- - -## Using this repository - -This repository can be used via some golang "vendoring" mechanism -(such as glide), or it can be used via -[git subtree](http://git.kernel.org/cgit/git/git.git/plain/contrib/subtree/git-subtree.txt). - -### Using "vendoring" - -The exact mechanism to pull in this repository will vary depending on -the tool you use. However, unless you end up having this repository -at the root of your project's repository you will probably need to -make sure you use the `--rootdir` command line parameter to let the -`verify-boilerplate.sh` know its location, eg: - - verify-boilerplate.sh --rootdir=/home/myrepo - -### Using `git subtree` - -When using the git subtree mechanism, this repository should be placed in the -top level of your project. - -To add `repo-infra` to your repository, use the following commands from the -root directory of **your** repository. - -First, add a git remote for the `repo-infra` repository: - -``` -$ git remote add repo-infra git://github.com/kubernetes/repo-infra -``` - -This is not strictly necessary, but reduces the typing required for subsequent -commands. - -Next, use `git subtree add` to create a new subtree in the `repo-infra` -directory within your project: - -``` -$ git subtree add -P repo-infra repo-infra master --squash -``` - -After this command, you will have: - -1. A `repo-infra` directory in your project containing the content of **this** - project -2. 2 new commits in the active branch: - 1. A commit that squashes the git history of the `repo-infra` project - 2. A merge commit whose ancestors are: - 1. The `HEAD` of the branch prior to when you ran `git subtree add` - 2. The commit containing the squashed `repo-infra` commits - -### Contributing - -Please see [CONTRIBUTING.md](CONTRIBUTING.md) for instructions on how to contribute. diff --git a/vendor/repo-infra/SECURITY_CONTACTS b/vendor/repo-infra/SECURITY_CONTACTS deleted file mode 100644 index f5fb5c9414..0000000000 --- a/vendor/repo-infra/SECURITY_CONTACTS +++ /dev/null @@ -1,14 +0,0 @@ -# Defined below are the security contacts for this repo. -# -# They are the contact point for the Product Security Committee to reach out -# to for triaging and handling of incoming issues. -# -# The below names agree to abide by the -# [Embargo Policy](https://git.k8s.io/security/private-distributors-list.md#embargo-policy) -# and will be removed and replaced if they violate that agreement. 
-# -# DO NOT REPORT SECURITY VULNERABILITIES DIRECTLY TO THESE NAMES, FOLLOW THE -# INSTRUCTIONS AT https://kubernetes.io/security/ - -ixdy -mikedanese diff --git a/vendor/repo-infra/WORKSPACE b/vendor/repo-infra/WORKSPACE deleted file mode 100644 index 4207000b09..0000000000 --- a/vendor/repo-infra/WORKSPACE +++ /dev/null @@ -1,21 +0,0 @@ -workspace(name = "io_k8s_repo_infra") - -load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - -http_archive( - name = "io_bazel_rules_go", - sha256 = "492c3ac68ed9dcf527a07e6a1b2dcbf199c6bf8b35517951467ac32e421c06c1", - url = "https://github.com/bazelbuild/rules_go/releases/download/0.17.0/rules_go-0.17.0.tar.gz", -) - -load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies") - -go_rules_dependencies() - -go_register_toolchains() - -http_archive( - name = "io_bazel", - sha256 = "6860a226c8123770b122189636fb0c156c6e5c9027b5b245ac3b2315b7b55641", - url = "https://github.com/bazelbuild/bazel/releases/download/0.22.0/bazel-0.22.0-dist.zip", -) diff --git a/vendor/repo-infra/code-of-conduct.md b/vendor/repo-infra/code-of-conduct.md deleted file mode 100644 index 0d15c00cf3..0000000000 --- a/vendor/repo-infra/code-of-conduct.md +++ /dev/null @@ -1,3 +0,0 @@ -# Kubernetes Community Code of Conduct - -Please refer to our [Kubernetes Community Code of Conduct](https://git.k8s.io/community/code-of-conduct.md) diff --git a/vendor/repo-infra/defs/BUILD.bazel b/vendor/repo-infra/defs/BUILD.bazel deleted file mode 100644 index ed9c6d7ffe..0000000000 --- a/vendor/repo-infra/defs/BUILD.bazel +++ /dev/null @@ -1,43 +0,0 @@ -load(":pkg.bzl", "pkg_tar") -load(":build.bzl", "release_filegroup") - -py_binary( - name = "gcs_uploader", - python_version = "PY2", - srcs = [ - "gcs_uploader.py", - ], - visibility = ["//visibility:public"], -) - -pkg_tar( - name = "pkg_tar_smoke", - srcs = glob(["*.bzl"]), -) - -# generate the hash files to use in the sh_tests below -release_filegroup( - name = "testfile", - testonly = True, - srcs = [":testdata/testfile.txt"], -) - -[ - sh_test( - name = "test_gen%ssum" % hash, - srcs = ["diff_test.sh"], - args = [ - "$(location testdata/testfile.txt.%s.expected)" % hash, - "$(location testdata/testfile.txt.%s)" % hash, - ], - data = [ - ":testdata/testfile.txt.%s" % hash, - ":testdata/testfile.txt.%s.expected" % hash, - ], - ) - for hash in [ - "md5", - "sha1", - "sha512", - ] -] diff --git a/vendor/repo-infra/defs/build.bzl b/vendor/repo-infra/defs/build.bzl deleted file mode 100644 index ff6d218a42..0000000000 --- a/vendor/repo-infra/defs/build.bzl +++ /dev/null @@ -1,192 +0,0 @@ -# Copyright 2016 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -def _gcs_upload_impl(ctx): - output_lines = [] - for t in ctx.attr.data: - label = str(t.label) - upload_path = ctx.attr.upload_paths.get(label, "") - for f in t.files.to_list(): - output_lines.append("%s\t%s" % (f.short_path, upload_path)) - - ctx.actions.write( - output = ctx.outputs.targets, - content = "\n".join(output_lines), - ) - - ctx.actions.write( - content = "%s --manifest %s --root $PWD -- $@" % ( - ctx.attr.uploader.files_to_run.executable.short_path, - ctx.outputs.targets.short_path, - ), - output = ctx.outputs.executable, - is_executable = True, - ) - - return struct( - runfiles = ctx.runfiles( - files = ctx.files.data + ctx.files.uploader + [ctx.info_file, ctx.version_file, ctx.outputs.targets], - ), - ) - -# Adds an executable rule to upload the specified artifacts to GCS. -# -# The keys in upload_paths must match the elaborated targets exactly; i.e., -# one must specify "//foo/bar:bar" and not just "//foo/bar". -# -# Both the upload_paths and the path supplied on the commandline can include -# Python format strings which will be replaced by values from the workspace status, -# e.g. gs://my-bucket-{BUILD_USER}/stash/{STABLE_BUILD_SCM_REVISION} -gcs_upload = rule( - attrs = { - "data": attr.label_list( - mandatory = True, - allow_files = True, - ), - "uploader": attr.label( - default = Label("//defs:gcs_uploader"), - allow_files = True, - ), - # TODO: combine with 'data' when label_keyed_string_dict is supported in Bazel - "upload_paths": attr.string_dict( - allow_empty = True, - ), - }, - executable = True, - outputs = { - "targets": "%{name}-targets.txt", - }, - implementation = _gcs_upload_impl, -) - -# Computes the md5sum of the provided src file, saving it in a file named 'name'. -def md5sum(name, src, **kwargs): - native.genrule( - name = name + "_genmd5sum", - srcs = [src], - outs = [name], - cmd = "command -v md5 >/dev/null && cmd='md5 -q' || cmd=md5sum; $$cmd $< | awk '{print $$1}' >$@", - message = "Computing md5sum", - **kwargs - ) - -# Computes the sha1sum of the provided src file, saving it in a file named 'name'. -def sha1sum(name, src, **kwargs): - native.genrule( - name = name + "_gensha1sum", - srcs = [src], - outs = [name], - cmd = "command -v sha1sum >/dev/null && cmd=sha1sum || cmd='shasum -a1'; $$cmd $< | awk '{print $$1}' >$@", - message = "Computing sha1sum", - **kwargs - ) - -# Computes the sha512sum of the provided src file, saving it in a file named 'name'. -def sha512sum(name, src, **kwargs): - native.genrule( - name = name + "_gensha512sum", - srcs = [src], - outs = [name], - cmd = "command -v sha512sum >/dev/null && cmd=sha512sum || cmd='shasum -a512'; $$cmd $< | awk '{print $$1}' >$@", - message = "Computing sha512sum", - **kwargs - ) - -# Returns a list of hash target names for the provided srcs. -# Also updates the srcs_basenames_needing_hashes dictionary, -# mapping src name to basename for each target in srcs. 
-def _hashes_for_srcs(srcs, srcs_basenames_needing_hashes): - hashes = [] - for src in srcs: - parts = src.split(":") - if len(parts) > 1: - basename = parts[1] - else: - basename = src.split("/")[-1] - - srcs_basenames_needing_hashes[src] = basename - hashes.append(basename + ".md5") - hashes.append(basename + ".sha1") - hashes.append(basename + ".sha512") - return hashes - -# Creates 3+N rules based on the provided targets: -# * A filegroup with just the provided targets (named 'name') -# * A filegroup containing all of the md5, sha1 and sha512 hash files ('name-hashes') -# * A filegroup containing both of the above ('name-and-hashes') -# * All of the necessary md5sum, sha1sum and sha512sum rules -# -# The targets are specified using the srcs and conditioned_srcs attributes. -# srcs is expected to be label list. -# conditioned_srcs is a dictionary mapping conditions to label lists. -# It will be passed to select(). -def release_filegroup(name, srcs = None, conditioned_srcs = None, tags = None, visibility = None, **kwargs): - if not srcs and not conditioned_srcs: - fail("srcs and conditioned_srcs cannot both be empty") - srcs = srcs or [] - - # A given src may occur in multiple conditioned_srcs, but we want to create the hash - # rules only once, so use a dictionary to deduplicate. - srcs_basenames_needing_hashes = {} - - hashes = _hashes_for_srcs(srcs, srcs_basenames_needing_hashes) - conditioned_hashes = {} - if conditioned_srcs: - for condition, csrcs in conditioned_srcs.items(): - conditioned_hashes[condition] = _hashes_for_srcs(csrcs, srcs_basenames_needing_hashes) - - hash_tags = tags or [] - hash_tags.append("manual") - for src, basename in srcs_basenames_needing_hashes.items(): - md5sum(name = basename + ".md5", src = src, tags = hash_tags, visibility = visibility) - sha1sum(name = basename + ".sha1", src = src, tags = hash_tags, visibility = visibility) - sha512sum(name = basename + ".sha512", src = src, tags = hash_tags, visibility = visibility) - - if conditioned_srcs: - native.filegroup( - name = name, - srcs = srcs + select(conditioned_srcs), - tags = tags, - **kwargs - ) - native.filegroup( - name = name + "-hashes", - srcs = hashes + select(conditioned_hashes), - tags = tags, - visibility = visibility, - **kwargs - ) - else: - native.filegroup( - name = name, - srcs = srcs, - tags = tags, - visibility = visibility, - **kwargs - ) - native.filegroup( - name = name + "-hashes", - srcs = hashes, - tags = tags, - visibility = visibility, - **kwargs - ) - - native.filegroup( - name = name + "-and-hashes", - srcs = [name, name + "-hashes"], - tags = tags, - visibility = visibility, - **kwargs - ) diff --git a/vendor/repo-infra/defs/deb.bzl b/vendor/repo-infra/defs/deb.bzl deleted file mode 100644 index 30be3a5f1d..0000000000 --- a/vendor/repo-infra/defs/deb.bzl +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2016 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("//defs:pkg.bzl", "pkg_tar") -load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_deb") - -KUBERNETES_AUTHORS = "Kubernetes Authors " - -KUBERNETES_HOMEPAGE = "http://kubernetes.io" - -GOARCH_TO_DEBARCH = { - "386": "i386", - "amd64": "amd64", - "arm": "armhf", - "arm64": "arm64", - "ppc64le": "ppc64el", - "s390x": "s390x", -} - -def k8s_deb(name, goarch = "amd64", tags = None, **kwargs): - debarch = GOARCH_TO_DEBARCH[goarch] - pkg_deb( - name = name + "-" + goarch, - architecture = debarch, - data = select({"@io_bazel_rules_go//go/platform:" + goarch: name + "-data-" + goarch}), - homepage = KUBERNETES_HOMEPAGE, - maintainer = KUBERNETES_AUTHORS, - package = name, - tags = tags, - **kwargs - ) - -def deb_data(name, goarch = "amd64", data = [], tags = None, visibility = None): - deps = [] - for i, info in enumerate(data): - dname = "%s-deb-data-%s-%s" % (name, goarch, i) - deps += [dname] - pkg_tar( - name = dname, - srcs = select({"@io_bazel_rules_go//go/platform:" + goarch: info["files"]}), - mode = info["mode"], - package_dir = info["dir"], - tags = tags, - visibility = visibility, - ) - pkg_tar( - name = name + "-data-" + goarch, - tags = tags, - visibility = visibility, - deps = select({"@io_bazel_rules_go//go/platform:" + goarch: deps}), - ) diff --git a/vendor/repo-infra/defs/diff_test.sh b/vendor/repo-infra/defs/diff_test.sh deleted file mode 100755 index 437ce01882..0000000000 --- a/vendor/repo-infra/defs/diff_test.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2019 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -expected=$1 -generated=$2 - -diff=$(diff -u "${expected}" "${generated}" || true) - -if [[ -n "${diff}" ]]; then - echo "Generated file ${generated} does not match expected file ${expected}" - echo "${diff}" - exit 1 -fi diff --git a/vendor/repo-infra/defs/gcs_uploader.py b/vendor/repo-infra/defs/gcs_uploader.py deleted file mode 100644 index fa593fc19b..0000000000 --- a/vendor/repo-infra/defs/gcs_uploader.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import print_function -import argparse -import atexit -import os -import os.path -import shutil -import subprocess -import sys -import tempfile - -def _workspace_status_dict(root): - d = {} - for f in ("stable-status.txt", "volatile-status.txt"): - with open(os.path.join(root, f)) as info_file: - for info_line in info_file: - info_line = info_line.strip("\n") - key, value = info_line.split(" ") - d[key] = value - return d - -def main(argv): - scratch = tempfile.mkdtemp(prefix="bazel-gcs.") - atexit.register(lambda: shutil.rmtree(scratch)) - - workspace_status = _workspace_status_dict(argv.root) - with open(argv.manifest) as manifest: - for artifact in manifest: - artifact = artifact.strip("\n") - src_file, dest_dir = artifact.split("\t") - dest_dir = dest_dir.format(**workspace_status) - scratch_dest_dir = os.path.join(scratch, dest_dir) - try: - os.makedirs(scratch_dest_dir) - except (OSError): - # skip directory already exists errors - pass - - src = os.path.join(argv.root, src_file) - dest = os.path.join(scratch_dest_dir, os.path.basename(src_file)) - os.symlink(src, dest) - - ret = 0 - uploaded_paths = [] - for gcs_path in argv.gcs_paths: - gcs_path = gcs_path.format(**workspace_status) - local_path = None - if gcs_path.startswith("file://"): - local_path = gcs_path[len("file://"):] - elif "://" not in gcs_path: - local_path = gcs_path - if local_path and not os.path.exists(local_path): - os.makedirs(local_path) - - cmd = ["gsutil"] - # When rsyncing to a local directory, parallelization thrashes the disk. - # It also seems to be buggy, causing frequent "File exists" errors. - # To mitigate, only use parallel mode when rsyncing to a remote path. - if not local_path: - cmd.append("-m") - cmd.extend(["rsync", "-C", "-r", scratch, gcs_path]) - ret |= subprocess.call(cmd) - - uploaded_paths.append(gcs_path) - - print("Uploaded to %s" % " ".join(uploaded_paths)) - sys.exit(ret) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Upload build targets to GCS.') - - parser.add_argument("--manifest", required=True, help="path to manifest of targets") - parser.add_argument("--root", required=True, help="path to root of workspace") - parser.add_argument("gcs_paths", nargs="+", help="path in gcs to push targets") - - main(parser.parse_args()) diff --git a/vendor/repo-infra/defs/go.bzl b/vendor/repo-infra/defs/go.bzl deleted file mode 100644 index 79c22af154..0000000000 --- a/vendor/repo-infra/defs/go.bzl +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2016 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -load("@io_bazel_rules_go//go:def.bzl", "GoLibrary", "GoPath", "go_context", "go_path", "go_rule") - -def _compute_genrule_variables(resolved_srcs, resolved_outs, dep_import_paths): - variables = { - "SRCS": " ".join([src.path for src in resolved_srcs]), - "OUTS": " ".join([out.path for out in resolved_outs]), - "GO_IMPORT_PATHS": " ".join(dep_import_paths), - } - if len(resolved_srcs) == 1: - variables["<"] = resolved_srcs[0].path - if len(resolved_outs) == 1: - variables["@"] = resolved_outs[0].path - return variables - -def _go_genrule_impl(ctx): - go = go_context(ctx) - - transitive_depsets = [] - label_dict = {} - go_paths = [] - - for dep in ctx.attr.srcs: - transitive_depsets.append(dep.files) - label_dict[dep.label] = dep.files.to_list() - - dep_import_paths = [] - for dep in ctx.attr.go_deps: - dep_import_paths.append(dep[GoLibrary].importpath) - - for go_path in ctx.attr.go_paths: - transitive_depsets.append(go_path.files) - label_dict[go_path.label] = go_path.files.to_list() - - gp = go_path[GoPath] - ext = gp.gopath_file.extension - if ext == "": - # mode is 'copy' - path is just the gopath - go_paths.append(gp.gopath_file.path) - elif ext == "tag": - # mode is 'link' - path is a tag file in the gopath - go_paths.append(gp.gopath_file.dirname) - else: - fail("Unknown extension on gopath file: '%s'." % ext) - - all_srcs = depset( - go.sdk.libs + go.sdk.srcs + go.sdk.tools + [go.sdk.go], - transitive = transitive_depsets, - ) - - cmd = [ - "set -e", - "export GO_GENRULE_EXECROOT=$$(pwd)", - # Set GOPATH, GOROOT, and PATH to absolute paths so that commands can chdir without issue - "export GOPATH=" + ctx.configuration.host_path_separator.join(["$$GO_GENRULE_EXECROOT/" + p for p in go_paths]), - "export GOROOT=$$GO_GENRULE_EXECROOT/" + go.sdk.root_file.dirname, - "export PATH=$$GO_GENRULE_EXECROOT/" + go.sdk.root_file.dirname + "/bin:$$PATH", - ctx.attr.cmd.strip(" \t\n\r"), - ] - resolved_inputs, argv, runfiles_manifests = ctx.resolve_command( - command = "\n".join(cmd), - attribute = "cmd", - expand_locations = True, - make_variables = _compute_genrule_variables( - all_srcs.to_list(), - ctx.outputs.outs, - dep_import_paths, - ), - tools = ctx.attr.tools, - label_dict = label_dict, - ) - - env = {} - env.update(ctx.configuration.default_shell_env) - env.update(go.env) - env["PATH"] = ctx.configuration.host_path_separator.join(["/bin", "/usr/bin"]) - - ctx.actions.run_shell( - inputs = depset(resolved_inputs, transitive = [all_srcs]), - outputs = ctx.outputs.outs, - env = env, - command = argv, - progress_message = "%s %s" % (ctx.attr.message, ctx), - mnemonic = "GoGenrule", - ) - -# We have codegen procedures that depend on the "go/*" stdlib packages -# and thus depend on executing with a valid GOROOT. _go_genrule handles -# dependencies on the Go toolchain and environment variables; the -# macro go_genrule handles setting up GOPATH dependencies (using go_path). -_go_genrule = go_rule( - _go_genrule_impl, - attrs = { - "srcs": attr.label_list(allow_files = True), - "tools": attr.label_list( - cfg = "host", - allow_files = True, - ), - "outs": attr.output_list(mandatory = True), - "cmd": attr.string(mandatory = True), - "go_paths": attr.label_list(), - "go_deps": attr.label_list(providers = [GoLibrary]), - "importpath": attr.string(), - "message": attr.string(), - "executable": attr.bool(default = False), - }, - output_to_genfiles = True, -) - -# Genrule wrapper for tools which need dependencies in a valid GOPATH -# and access to the Go standard library and toolchain. 
-# -# Go source dependencies specified through the go_deps argument -# are passed to the rules_go go_path rule to build a GOPATH -# for the provided genrule command. -# -# The command can access the generated GOPATH through the GOPATH -# environment variable. -def go_genrule(name, go_deps, **kw): - go_path_name = "%s~gopath" % name - go_path( - name = go_path_name, - mode = "link", - visibility = ["//visibility:private"], - deps = go_deps, - ) - _go_genrule( - name = name, - go_paths = [":" + go_path_name], - go_deps = go_deps, - **kw - ) diff --git a/vendor/repo-infra/defs/pkg.bzl b/vendor/repo-infra/defs/pkg.bzl deleted file mode 100644 index 8356089d25..0000000000 --- a/vendor/repo-infra/defs/pkg.bzl +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load( - "@bazel_tools//tools/build_defs/pkg:pkg.bzl", - _real_pkg_tar = "pkg_tar", -) - -# pkg_tar wraps the official pkg_tar rule with our faster -# Go-based build_tar binary. -# Additionally, the upstream pkg_tar rule defaults mode to "0555", -# which prevents build_tar from automatically choosing an -# appropriate mode, so we instead default it to "". -def pkg_tar( - build_tar = "@io_k8s_repo_infra//tools/build_tar", - mode = "", - **kwargs): - _real_pkg_tar(build_tar = build_tar, mode = mode, **kwargs) diff --git a/vendor/repo-infra/defs/rpm.bzl b/vendor/repo-infra/defs/rpm.bzl deleted file mode 100644 index 2ef2b2d0ad..0000000000 --- a/vendor/repo-infra/defs/rpm.bzl +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -load("@bazel_tools//tools/build_defs/pkg:rpm.bzl", "pkg_rpm") - -GOARCH_TO_RPMARCH = { - "386": "i386", - "amd64": "x86_64", - "arm": "armhfp", - "arm64": "aarch64", - "ppc64le": "ppc64le", - "s390x": "s390x", -} - -def pkg_rpm_for_goarch(name, data, goarch, tags = None, **kwargs): - rpmarch = GOARCH_TO_RPMARCH[goarch] - pkg_rpm( - name = name + "-" + goarch, - architecture = rpmarch, - data = select( - { - "@io_bazel_rules_go//go/platform:" + goarch: [ - d.format(GOARCH = goarch, RPMARCH = rpmarch) - for d in data - ], - }, - ), - tags = tags, - **kwargs - ) diff --git a/vendor/repo-infra/defs/run_in_workspace.bzl b/vendor/repo-infra/defs/run_in_workspace.bzl deleted file mode 100644 index 929e0ce73e..0000000000 --- a/vendor/repo-infra/defs/run_in_workspace.bzl +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2018 The Kubernetes Authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This technique was inspired by the gazelle rule implementation in bazelbuild/rules_go: -# https://github.com/bazelbuild/rules_go/blob/86ade29284ca11deeead86c061e9ba9bd0d157e0/go/private/tools/gazelle.bzl - -# Writes out a script which saves the runfiles directory, -# changes to the workspace root, and then runs a command. -def _workspace_binary_script_impl(ctx): - content = """#!/usr/bin/env bash -set -o errexit -set -o nounset -set -o pipefail - -BASE=$(pwd) -cd $(dirname $(readlink {root_file})) -"$BASE/{cmd}" $@ -""".format( - cmd = ctx.file.cmd.short_path, - root_file = ctx.file.root_file.short_path, - ) - ctx.actions.write( - output = ctx.outputs.executable, - content = content, - is_executable = True, - ) - runfiles = ctx.runfiles( - files = [ - ctx.file.cmd, - ctx.file.root_file, - ], - ) - return [DefaultInfo(runfiles = runfiles)] - -_workspace_binary_script = rule( - attrs = { - "cmd": attr.label( - mandatory = True, - allow_single_file = True, - ), - "root_file": attr.label( - mandatory = True, - allow_single_file = True, - ), - }, - executable = True, - implementation = _workspace_binary_script_impl, -) - -# Wraps a binary to be run in the workspace root via bazel run. -# -# For example, one might do something like -# -# workspace_binary( -# name = "dep", -# cmd = "//vendor/github.com/golang/dep/cmd/dep", -# ) -# -# which would allow running dep with bazel run. 
-def workspace_binary( - name, - cmd, - args = None, - visibility = None, - root_file = "//:WORKSPACE"): - script_name = name + "_script" - _workspace_binary_script( - name = script_name, - cmd = cmd, - root_file = root_file, - tags = ["manual"], - ) - native.sh_binary( - name = name, - srcs = [":" + script_name], - args = args, - visibility = visibility, - tags = ["manual"], - ) diff --git a/vendor/repo-infra/defs/testdata/testfile.txt b/vendor/repo-infra/defs/testdata/testfile.txt deleted file mode 100644 index ba5a5d3da4..0000000000 --- a/vendor/repo-infra/defs/testdata/testfile.txt +++ /dev/null @@ -1 +0,0 @@ -This is just some data to hash diff --git a/vendor/repo-infra/defs/testdata/testfile.txt.md5.expected b/vendor/repo-infra/defs/testdata/testfile.txt.md5.expected deleted file mode 100644 index 79c56b5a5b..0000000000 --- a/vendor/repo-infra/defs/testdata/testfile.txt.md5.expected +++ /dev/null @@ -1 +0,0 @@ -6c840392943ddfc0d213a8786ddcf913 diff --git a/vendor/repo-infra/defs/testdata/testfile.txt.sha1.expected b/vendor/repo-infra/defs/testdata/testfile.txt.sha1.expected deleted file mode 100644 index 76ef2bce9e..0000000000 --- a/vendor/repo-infra/defs/testdata/testfile.txt.sha1.expected +++ /dev/null @@ -1 +0,0 @@ -a1ee087329ef524229a8eb3dadc33265a0d30288 diff --git a/vendor/repo-infra/defs/testdata/testfile.txt.sha512.expected b/vendor/repo-infra/defs/testdata/testfile.txt.sha512.expected deleted file mode 100644 index 894b8a70ed..0000000000 --- a/vendor/repo-infra/defs/testdata/testfile.txt.sha512.expected +++ /dev/null @@ -1 +0,0 @@ -77f3b6ab7f0b30eac6c84baecd7308c462df575b5a1bea484253fcdf5a0f61f0d9aeaa5a25f51a1621c90d0cc461f041ba5dba7573092221287af620f0f1c573 diff --git a/vendor/repo-infra/kazel/BUILD.bazel b/vendor/repo-infra/kazel/BUILD.bazel deleted file mode 100644 index 9ed403e8f0..0000000000 --- a/vendor/repo-infra/kazel/BUILD.bazel +++ /dev/null @@ -1,41 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -licenses(["notice"]) - -load( - "@io_bazel_rules_go//go:def.bzl", - "go_binary", - "go_library", - "go_test", -) - -go_binary( - name = "kazel", - embed = [":go_default_library"], -) - -go_library( - name = "go_default_library", - srcs = [ - "config.go", - "diff.go", - "generator.go", - "kazel.go", - "sourcerer.go", - ], - importpath = "k8s.io/repo-infra/kazel", - deps = [ - "//vendor/github.com/bazelbuild/buildtools/build:go_default_library", - "//vendor/k8s.io/klog:go_default_library", - ], -) - -go_test( - name = "go_default_test", - srcs = [ - "generator_test.go", - "kazel_test.go", - ], - embed = [":go_default_library"], - deps = ["//vendor/github.com/bazelbuild/buildtools/build:go_default_library"], -) diff --git a/vendor/repo-infra/kazel/README.rst b/vendor/repo-infra/kazel/README.rst deleted file mode 100644 index 2cc96aee7c..0000000000 --- a/vendor/repo-infra/kazel/README.rst +++ /dev/null @@ -1,90 +0,0 @@ -kazel - a BUILD file generator for go and bazel -=============================================== - -Requirements: -############# - -* Your project must be somewhat compatible with go tool because - kazel uses go tool to parse your import tree. -* You must have a **GOPATH** and **GOROOT** setup and your project must - be in the correct location in your **GOPATH**. -* Your ``./vendor`` directory may not contain ``BUILD`` files. - -Usage: -###### - -1. Get kazel by running ``go get k8s.io/repo-infra/kazel``. - -2. Create a ``.kazelcfg.json`` in the root of the repository. For the - kazel repository, the ``.kazelcfg.json`` would look like: - - .. 
code-block:: json - - { - "GoPrefix": "k8s.io/repo-infra", - "SrcDirs": [ - "./kazel" - ], - "SkippedPaths": [ - ".*foobar(baz)?.*$" - ] - } - -3. Run kazel: - - .. code-block:: bash - - $ kazel -root=$GOPATH/src/k8s.io/repo-infra - -Defaults: -######### - -* **SrcDirs** in ``.kazelcfg.json`` defaults to ``["./"]`` -* ``-root`` option defaults to the current working directory - -Automanagement: -############### - -kazel reconciles rules that have the "**automanaged**" tag. If -you no longer want kazel to manage a rule, you can remove the -**automanaged** tag and kazel will no longer manage that rule. - -kazel only manages srcs, deps, and library attributes of a -rule after initial creation so you can add and managed other -attributes like data and copts and kazel will respect your -changes. - -kazel automatically formats all ``BUILD`` files in your repository -except for those matching **SkippedPaths**. - -Adding "sources" rules: -####################### - -If you set "**AddSourcesRules**": ``true`` in your ``.kazelcfg.json``, -kazel will create "**package-srcs**" and "**all-srcs**" rules in every -package. - -The "**package-srcs**" rule is a glob matching all files in the -package recursively, but not any files owned by packages in -subdirectories. - -The "**all-srcs**" rule includes both the "**package-srcs**" rule and -the "**all-srcs**" rules of all subpackages; i.e. **//:all-srcs** will -include all files in your repository. - -The "**package-srcs**" rule defaults to private visibility, -since it is safer to depend on the "**all-srcs**" rule: if a -subpackage is added, the "**package-srcs**" rule will no longer -include those files. - -You can remove the "**automanaged**" tag from the "**package-srcs**" -rule if you need to modify the glob (such as adding excludes). -It's recommended that you leave the "**all-srcs**" rule -automanaged. - -Validating BUILD files in CI: -############################# - -If you run kazel with ``--validate``, it will not update any ``BUILD`` files, but it -will exit nonzero if any ``BUILD`` files are out-of-date. You can add ``--print-diff`` -to print out the changes needed. diff --git a/vendor/repo-infra/kazel/config.go b/vendor/repo-infra/kazel/config.go deleted file mode 100644 index 3d2363196c..0000000000 --- a/vendor/repo-infra/kazel/config.go +++ /dev/null @@ -1,70 +0,0 @@ -/* -Copyright 2017 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "encoding/json" - "io/ioutil" -) - -// Cfg defines the configuration options for kazel. -type Cfg struct { - GoPrefix string - // evaluated recursively, defaults to ["."] - SrcDirs []string - // regexps that match packages to skip - SkippedPaths []string - // regexps that match packages to skip for k8s codegen. - // note that this skips anything matched by SkippedPaths as well. 
- SkippedK8sCodegenPaths []string - // whether to add "pkg-srcs" and "all-srcs" filegroups - // note that this operates on the entire tree (not just SrcsDirs) but skips anything matching SkippedPaths - AddSourcesRules bool - // whether to have multiple build files in vendor/ or just one. - VendorMultipleBuildFiles bool - // Whether to manage the upstream Go rules provided by bazelbuild/rules_go. - // If using gazelle, set this to false (or omit). - ManageGoRules bool - // If defined, metadata parsed from "+k8s:" codegen build tags will be saved into this file. - K8sCodegenBzlFile string - // If defined, contains the boilerplate text to be included in the header of the generated bzl file. - K8sCodegenBoilerplateFile string - // Which tags to include in the codegen bzl file. - // Include only the name of the tag. - // For example, to include +k8s:foo=bar, list "foo" here. - K8sCodegenTags []string -} - -// ReadCfg reads and unmarshals the specified json file into a Cfg struct. -func ReadCfg(cfgPath string) (*Cfg, error) { - b, err := ioutil.ReadFile(cfgPath) - if err != nil { - return nil, err - } - var cfg Cfg - if err := json.Unmarshal(b, &cfg); err != nil { - return nil, err - } - defaultCfg(&cfg) - return &cfg, nil -} - -func defaultCfg(c *Cfg) { - if len(c.SrcDirs) == 0 { - c.SrcDirs = []string{"."} - } -} diff --git a/vendor/repo-infra/kazel/diff.go b/vendor/repo-infra/kazel/diff.go deleted file mode 100644 index 37bed9381c..0000000000 --- a/vendor/repo-infra/kazel/diff.go +++ /dev/null @@ -1,60 +0,0 @@ -/* -Copyright 2017 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "io/ioutil" - "os" - "os/exec" -) - -// Diff prints the unified diff of the two provided byte slices -// using the unix diff command. -func Diff(left, right []byte) error { - lf, err := ioutil.TempFile("/tmp", "actual-file-") - if err != nil { - return err - } - defer lf.Close() - defer os.Remove(lf.Name()) - - rf, err := ioutil.TempFile("/tmp", "expected-file-") - if err != nil { - return err - } - defer rf.Close() - defer os.Remove(rf.Name()) - - _, err = lf.Write(left) - if err != nil { - return err - } - lf.Close() - - _, err = rf.Write(right) - if err != nil { - return err - } - rf.Close() - - cmd := exec.Command("/usr/bin/diff", "-u", lf.Name(), rf.Name()) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - cmd.Run() - - return nil -} diff --git a/vendor/repo-infra/kazel/generator.go b/vendor/repo-infra/kazel/generator.go deleted file mode 100644 index 2cb8c0fc7e..0000000000 --- a/vendor/repo-infra/kazel/generator.go +++ /dev/null @@ -1,178 +0,0 @@ -/* -Copyright 2017 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "io/ioutil" - "os" - "path/filepath" - "regexp" - "sort" - "strings" - - "github.com/bazelbuild/buildtools/build" -) - -var ( - // Generator tags are specified using the format "// +k8s:name=value" - genTagRe = regexp.MustCompile(`//\s*\+k8s:([^\s=]+)(?:=(\S+))\s*\n`) -) - -// {tagName: {value: {pkgs}}} or {tagName: {pkg: {values}}} -type generatorTagsMap map[string]map[string]map[string]bool - -// extractTags finds k8s codegen tags found in b listed in requestedTags. -// It returns a map of {tag name: slice of values for that tag}. -func extractTags(b []byte, requestedTags map[string]bool) map[string][]string { - tags := make(map[string][]string) - matches := genTagRe.FindAllSubmatch(b, -1) - for _, m := range matches { - if len(m) >= 3 { - tag, values := string(m[1]), string(m[2]) - if _, requested := requestedTags[tag]; !requested { - continue - } - tags[tag] = append(tags[tag], strings.Split(values, ",")...) - } - } - return tags -} - -// findGeneratorTags searches for all packages under root that include a kubernetes generator -// tag comment. It does not follow symlinks, and any path in the configured skippedPaths -// or codegen skipped paths is skipped. -func (v *Vendorer) findGeneratorTags(root string, requestedTags map[string]bool) (tagsValuesPkgs, tagsPkgsValues generatorTagsMap, err error) { - tagsValuesPkgs = make(generatorTagsMap) - tagsPkgsValues = make(generatorTagsMap) - - err = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - pkg := filepath.Dir(path) - - for _, r := range v.skippedK8sCodegenPaths { - if r.MatchString(pkg) { - return filepath.SkipDir - } - } - - if !strings.HasSuffix(path, ".go") || strings.HasSuffix(path, "_test.go") { - return nil - } - - b, err := ioutil.ReadFile(path) - if err != nil { - return err - } - - for tag, values := range extractTags(b, requestedTags) { - if _, present := tagsValuesPkgs[tag]; !present { - tagsValuesPkgs[tag] = make(map[string]map[string]bool) - } - if _, present := tagsPkgsValues[tag]; !present { - tagsPkgsValues[tag] = make(map[string]map[string]bool) - } - if _, present := tagsPkgsValues[tag][pkg]; !present { - tagsPkgsValues[tag][pkg] = make(map[string]bool) - } - for _, v := range values { - if _, present := tagsValuesPkgs[tag][v]; !present { - tagsValuesPkgs[tag][v] = make(map[string]bool) - } - // Since multiple files in the same package may list a given tag/value, use a set to deduplicate. - tagsValuesPkgs[tag][v][pkg] = true - tagsPkgsValues[tag][pkg][v] = true - } - } - - return nil - }) - - if err != nil { - return nil, nil, err - } - - return -} - -// flattened returns a copy of the map with the final stringSet flattened into a sorted slice. 
-func flattened(m generatorTagsMap) map[string]map[string][]string { - flattened := make(map[string]map[string][]string) - for tag, subMap := range m { - flattened[tag] = make(map[string][]string) - for k, subSet := range subMap { - for v := range subSet { - flattened[tag][k] = append(flattened[tag][k], v) - } - sort.Strings(flattened[tag][k]) - } - } - return flattened -} - -// walkGenerated generates a k8s codegen bzl file that can be parsed by Starlark -// rules and macros to find packages needed k8s code generation. -// This involves reading all non-test go sources in the tree and looking for -// "+k8s:name=value" tags. Only those tags listed in K8sCodegenTags will be -// included. -// If a K8sCodegenBoilerplateFile was configured, the contents of this file -// will be included as the header of the generated bzl file. -// Returns true if there are diffs against the existing generated bzl file. -func (v *Vendorer) walkGenerated() (bool, error) { - if v.cfg.K8sCodegenBzlFile == "" { - return false, nil - } - // only include the specified tags - requestedTags := make(map[string]bool) - for _, tag := range v.cfg.K8sCodegenTags { - requestedTags[tag] = true - } - tagsValuesPkgs, tagsPkgsValues, err := v.findGeneratorTags(".", requestedTags) - if err != nil { - return false, err - } - - f := &build.File{ - Path: v.cfg.K8sCodegenBzlFile, - } - addCommentBefore(f, "#################################################") - addCommentBefore(f, "# # # # # # # # # # # # # # # # # # # # # # # # #") - addCommentBefore(f, "This file is autogenerated by kazel. DO NOT EDIT.") - addCommentBefore(f, "# # # # # # # # # # # # # # # # # # # # # # # # #") - addCommentBefore(f, "#################################################") - addCommentBefore(f, "") - - f.Stmt = append(f.Stmt, varExpr("go_prefix", "The go prefix passed to kazel", v.cfg.GoPrefix)) - f.Stmt = append(f.Stmt, varExpr("kazel_configured_tags", "The list of codegen tags kazel is configured to find", v.cfg.K8sCodegenTags)) - f.Stmt = append(f.Stmt, varExpr("tags_values_pkgs", "tags_values_pkgs is a dictionary mapping {k8s build tag: {tag value: [pkgs including that tag:value]}}", flattened(tagsValuesPkgs))) - f.Stmt = append(f.Stmt, varExpr("tags_pkgs_values", "tags_pkgs_values is a dictionary mapping {k8s build tag: {pkg: [tag values in pkg]}}", flattened(tagsPkgsValues))) - - var boilerplate []byte - if v.cfg.K8sCodegenBoilerplateFile != "" { - boilerplate, err = ioutil.ReadFile(v.cfg.K8sCodegenBoilerplateFile) - if err != nil { - return false, err - } - } - // Open existing file to use in diff mode. - _, err = os.Stat(f.Path) - if err != nil && !os.IsNotExist(err) { - return false, err - } - return writeFile(f.Path, f, boilerplate, !os.IsNotExist(err), v.dryRun) -} diff --git a/vendor/repo-infra/kazel/generator_test.go b/vendor/repo-infra/kazel/generator_test.go deleted file mode 100644 index b5653e6ce4..0000000000 --- a/vendor/repo-infra/kazel/generator_test.go +++ /dev/null @@ -1,122 +0,0 @@ -/* -Copyright 2018 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "reflect" - "testing" -) - -func TestExtractTags(t *testing.T) { - requestedTags := map[string]bool{ - "foo-gen": true, - "baz-gen": true, - "quux-gen:with-extra@things": true, - } - var testCases = []struct { - src string - want map[string][]string - }{ - { - src: "// +k8s:foo-gen=a,b\n", - want: map[string][]string{"foo-gen": {"a", "b"}}, - }, - { - src: "// +k8s:bar-gen=a,b\n", - want: map[string][]string{}, - }, - { - src: "// +k8s:quux-gen=true\n", - want: map[string][]string{}, - }, - { - src: "// +k8s:quux-gen:with-extra@things=123\n", - want: map[string][]string{"quux-gen:with-extra@things": {"123"}}, - }, - { - src: `/* -This is a header. -*/ -// +k8s:foo-gen=first -// +k8s:bar-gen=true -// +build linux - -// +k8s:baz-gen=1,2,a -// +k8s:baz-gen=b - -// k8s:foo-gen=not-this-one -// commenting out this one too +k8s:foo-gen=disabled -// +k8s:foo-gen=ignore this one too - -// Let's repeat one! -// +k8s:baz-gen=b -// +k8s:foo-gen=last - -import "some package" -`, - want: map[string][]string{ - "foo-gen": {"first", "last"}, - "baz-gen": {"1", "2", "a", "b", "b"}, - }, - }, - } - - for _, testCase := range testCases { - result := extractTags([]byte(testCase.src), requestedTags) - if !reflect.DeepEqual(result, testCase.want) { - t.Errorf("extractTags(%v) = %v; want %v", testCase.src, result, testCase.want) - } - } -} - -func TestFlattened(t *testing.T) { - m := generatorTagsMap{ - "foo-gen": { - "a": { - "pkg/one": true, - "pkg/two": true, - }, - }, - "bar-gen": { - "true": { - "pkg/one": true, - "pkg/three": true, - // also test sorting - this should end up at the front of the slice - "a/pkg": true, - }, - "false": { - "pkg/one": true, - }, - }, - } - - want := map[string]map[string][]string{ - "foo-gen": { - "a": {"pkg/one", "pkg/two"}, - }, - "bar-gen": { - "true": {"a/pkg", "pkg/one", "pkg/three"}, - "false": {"pkg/one"}, - }, - } - - result := flattened(m) - if !reflect.DeepEqual(result, want) { - t.Errorf("flattened(%v) = %v; want %v", m, result, want) - } - -} diff --git a/vendor/repo-infra/kazel/kazel.go b/vendor/repo-infra/kazel/kazel.go deleted file mode 100644 index ec8f14a1f6..0000000000 --- a/vendor/repo-infra/kazel/kazel.go +++ /dev/null @@ -1,389 +0,0 @@ -/* -Copyright 2017 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package main - -import ( - "bytes" - "flag" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "reflect" - "regexp" - "sort" - - "github.com/bazelbuild/buildtools/build" - - "k8s.io/klog" -) - -const ( - automanagedTag = "automanaged" -) - -var ( - root = flag.String("root", ".", "root of go source") - dryRun = flag.Bool("dry-run", false, "run in dry mode") - printDiff = flag.Bool("print-diff", false, "print diff to stdout") - validate = flag.Bool("validate", false, "run in dry mode and exit nonzero if any BUILD files need to be updated") - cfgPath = flag.String("cfg-path", ".kazelcfg.json", "path to kazel config (relative paths interpreted relative to -repo.") -) - -func main() { - flag.Parse() - flag.Set("alsologtostderr", "true") - if *root == "" { - klog.Fatalf("-root argument is required") - } - if *validate { - *dryRun = true - } - v, err := newVendorer(*root, *cfgPath, *dryRun) - if err != nil { - klog.Fatalf("unable to build vendorer: %v", err) - } - if err = os.Chdir(v.root); err != nil { - klog.Fatalf("cannot chdir into root %q: %v", v.root, err) - } - if v.cfg.ManageGoRules { - klog.Fatalf("kazel no longer supports managing Go rules") - } - - wroteGenerated := false - if wroteGenerated, err = v.walkGenerated(); err != nil { - klog.Fatalf("err walking generated: %v", err) - } - if _, err = v.walkSource("."); err != nil { - klog.Fatalf("err walking source: %v", err) - } - written := 0 - if written, err = v.reconcileAllRules(); err != nil { - klog.Fatalf("err reconciling rules: %v", err) - } - if wroteGenerated { - written++ - } - if *validate && written > 0 { - fmt.Fprintf(os.Stderr, "\n%d BUILD files not up-to-date.\n", written) - os.Exit(1) - } -} - -// Vendorer collects context, configuration, and cache while walking the tree. -type Vendorer struct { - skippedPaths []*regexp.Regexp - skippedK8sCodegenPaths []*regexp.Regexp - dryRun bool - root string - cfg *Cfg - newRules map[string][]*build.Rule // package path -> list of rules to add or update - managedAttrs []string // which rule attributes kazel will overwrite -} - -func newVendorer(root, cfgPath string, dryRun bool) (*Vendorer, error) { - absRoot, err := filepath.Abs(root) - if err != nil { - return nil, fmt.Errorf("could not get absolute path: %v", err) - } - if !filepath.IsAbs(cfgPath) { - cfgPath = filepath.Join(absRoot, cfgPath) - } - cfg, err := ReadCfg(cfgPath) - if err != nil { - return nil, err - } - - v := Vendorer{ - dryRun: dryRun, - root: absRoot, - cfg: cfg, - newRules: make(map[string][]*build.Rule), - managedAttrs: []string{"srcs"}, - } - - builtIn, err := compileSkippedPaths([]string{"^\\.git", "^bazel-*"}) - if err != nil { - return nil, err - } - - sp, err := compileSkippedPaths(cfg.SkippedPaths) - if err != nil { - return nil, err - } - sp = append(builtIn, sp...) - v.skippedPaths = sp - - sop, err := compileSkippedPaths(cfg.SkippedK8sCodegenPaths) - if err != nil { - return nil, err - } - v.skippedK8sCodegenPaths = append(sop, sp...) - - return &v, nil - -} - -func writeRules(file *build.File, rules []*build.Rule) { - for _, rule := range rules { - file.Stmt = append(file.Stmt, rule.Call) - } -} - -func (v *Vendorer) addRules(pkgPath string, rules []*build.Rule) { - cleanPath := filepath.Clean(pkgPath) - v.newRules[cleanPath] = append(v.newRules[cleanPath], rules...) 
-} - -func (v *Vendorer) reconcileAllRules() (int, error) { - var paths []string - for path := range v.newRules { - paths = append(paths, path) - } - sort.Strings(paths) - written := 0 - for _, path := range paths { - w, err := ReconcileRules(path, v.newRules[path], v.managedAttrs, v.dryRun) - if w { - written++ - } - if err != nil { - return written, err - } - } - return written, nil -} - -// addCommentBefore adds a whole-line comment before the provided Expr. -func addCommentBefore(e build.Expr, comment string) { - c := e.Comment() - c.Before = append(c.Before, build.Comment{Token: fmt.Sprintf("# %s", comment)}) -} - -// varExpr creates a variable expression of the form "name = expr". -// v will be converted into an appropriate Expr using asExpr. -// The optional description will be included as a comment before the expression. -func varExpr(name, desc string, v interface{}) build.Expr { - e := &build.BinaryExpr{ - X: &build.LiteralExpr{Token: name}, - Op: "=", - Y: asExpr(v), - } - if desc != "" { - addCommentBefore(e, desc) - } - return e -} - -// rvSliceLessFunc returns a function that can be used with sort.Slice() or sort.SliceStable() -// to sort a slice of reflect.Values. -// It sorts ints and floats as their native kinds, and everything else as a string. -func rvSliceLessFunc(k reflect.Kind, vs []reflect.Value) func(int, int) bool { - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return func(i, j int) bool { return vs[i].Int() < vs[j].Int() } - case reflect.Float32, reflect.Float64: - return func(i, j int) bool { return vs[i].Float() < vs[j].Float() } - default: - return func(i, j int) bool { - return fmt.Sprintf("%v", vs[i]) < fmt.Sprintf("%v", vs[j]) - } - } -} - -// asExpr converts a native Go type into the equivalent Starlark expression using reflection. -// The keys of maps will be sorted for reproducibility. 
-func asExpr(e interface{}) build.Expr { - rv := reflect.ValueOf(e) - switch rv.Kind() { - case reflect.Bool: - return &build.LiteralExpr{Token: fmt.Sprintf("%t", e)} - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return &build.LiteralExpr{Token: fmt.Sprintf("%d", e)} - case reflect.Float32, reflect.Float64: - return &build.LiteralExpr{Token: fmt.Sprintf("%g", e)} - case reflect.String: - return &build.StringExpr{Value: e.(string)} - case reflect.Slice, reflect.Array: - var list []build.Expr - for i := 0; i < rv.Len(); i++ { - list = append(list, asExpr(rv.Index(i).Interface())) - } - return &build.ListExpr{List: list} - case reflect.Map: - var list []build.Expr - keys := rv.MapKeys() - sort.SliceStable(keys, rvSliceLessFunc(rv.Type().Key().Kind(), keys)) - for _, key := range keys { - list = append(list, &build.KeyValueExpr{ - Key: asExpr(key.Interface()), - Value: asExpr(rv.MapIndex(key).Interface()), - }) - } - return &build.DictExpr{List: list} - default: - klog.Fatalf("unhandled kind: %q for value: %q", rv.Kind(), rv) - return nil - } -} - -func newRule(rt, name string, attrs map[string]build.Expr) *build.Rule { - rule := &build.Rule{ - Call: &build.CallExpr{ - X: &build.LiteralExpr{Token: rt}, - }, - } - rule.SetAttr("name", asExpr(name)) - for k, v := range attrs { - rule.SetAttr(k, v) - } - rule.SetAttr("tags", asExpr([]string{automanagedTag})) - return rule -} - -// findBuildFile determines the name of a preexisting BUILD file, returning -// a default if no such file exists. -func findBuildFile(pkgPath string) (bool, string) { - options := []string{"BUILD.bazel", "BUILD"} - for _, b := range options { - path := filepath.Join(pkgPath, b) - info, err := os.Stat(path) - if err == nil && !info.IsDir() { - return true, path - } - } - return false, filepath.Join(pkgPath, "BUILD.bazel") -} - -// ReconcileRules reconciles, simplifies, and writes the rules for the specified package, adding -// additional dependency rules as needed. -func ReconcileRules(pkgPath string, rules []*build.Rule, managedAttrs []string, dryRun bool) (bool, error) { - _, path := findBuildFile(pkgPath) - info, err := os.Stat(path) - if err != nil && os.IsNotExist(err) { - f := &build.File{} - writeRules(f, rules) - return writeFile(path, f, nil, false, dryRun) - } else if err != nil { - return false, err - } - if info.IsDir() { - return false, fmt.Errorf("%q cannot be a directory", path) - } - b, err := ioutil.ReadFile(path) - if err != nil { - return false, err - } - f, err := build.Parse(path, b) - if err != nil { - return false, err - } - oldRules := make(map[string]*build.Rule) - for _, r := range f.Rules("") { - oldRules[r.Name()] = r - } - for _, r := range rules { - o, ok := oldRules[r.Name()] - if !ok { - f.Stmt = append(f.Stmt, r.Call) - continue - } - if !RuleIsManaged(o) { - continue - } - reconcileAttr := func(o, n *build.Rule, name string) { - if e := n.Attr(name); e != nil { - o.SetAttr(name, e) - } else { - o.DelAttr(name) - } - } - for _, attr := range managedAttrs { - reconcileAttr(o, r, attr) - } - delete(oldRules, r.Name()) - } - - for _, r := range oldRules { - if !RuleIsManaged(r) { - continue - } - f.DelRules(r.Kind(), r.Name()) - } - - return writeFile(path, f, nil, true, dryRun) -} - -// RuleIsManaged returns whether the provided rule is managed by this tool, -// based on the tags set on the rule. 
-func RuleIsManaged(r *build.Rule) bool { - for _, tag := range r.AttrStrings("tags") { - if tag == automanagedTag { - return true - } - } - return false -} - -// writeFile writes out f to path, prepending boilerplate to the output. -// If exists is true, compares against the existing file specified by path, -// returning false if there are no changes. -// Otherwise, returns true. -// If dryRun is false, no files are actually changed; otherwise, the file will be written. -func writeFile(path string, f *build.File, boilerplate []byte, exists, dryRun bool) (bool, error) { - var info build.RewriteInfo - build.Rewrite(f, &info) - var out []byte - out = append(out, boilerplate...) - out = append(out, build.Format(f)...) - if exists { - orig, err := ioutil.ReadFile(path) - if err != nil { - return false, err - } - if bytes.Compare(orig, out) == 0 { - return false, nil - } - if *printDiff { - Diff(orig, out) - } - } - if dryRun { - fmt.Fprintf(os.Stderr, "DRY-RUN: wrote %q\n", path) - return true, nil - } - werr := ioutil.WriteFile(path, out, 0644) - if werr == nil { - fmt.Fprintf(os.Stderr, "wrote %q\n", path) - } - return werr == nil, werr -} - -func compileSkippedPaths(skippedPaths []string) ([]*regexp.Regexp, error) { - regexPaths := []*regexp.Regexp{} - - for _, sp := range skippedPaths { - r, err := regexp.Compile(sp) - if err != nil { - return nil, err - } - regexPaths = append(regexPaths, r) - } - return regexPaths, nil -} diff --git a/vendor/repo-infra/kazel/kazel_test.go b/vendor/repo-infra/kazel/kazel_test.go deleted file mode 100644 index e4dd68e140..0000000000 --- a/vendor/repo-infra/kazel/kazel_test.go +++ /dev/null @@ -1,76 +0,0 @@ -/* -Copyright 2018 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "testing" - - "github.com/bazelbuild/buildtools/build" -) - -func TestAsExpr(t *testing.T) { - var testCases = []struct { - expr interface{} - want string - }{ - {42, "42"}, - {2.71828, "2.71828"}, - {2.718281828459045, "2.718281828459045"}, - {"a string", `"a string"`}, - // values should stay in specified order - {[]int{4, 7, 2, 9, 21}, `[ - 4, - 7, - 2, - 9, - 21, -]`}, - // keys should get sorted - {map[int]string{1: "foo", 5: "baz", 3: "bar"}, `{ - 1: "foo", - 3: "bar", - 5: "baz", -}`}, - // keys true and false should be sorted by their string representation - { - map[bool]map[string][]float64{ - true: {"b": {2, 2.2}, "a": {1, 1.1, 1.11}}, - false: {"": {}}, - }, - `{ - false: {"": []}, - true: { - "a": [ - 1, - 1.1, - 1.11, - ], - "b": [ - 2, - 2.2, - ], - }, -}`}, - } - - for _, testCase := range testCases { - result := build.FormatString(asExpr(testCase.expr)) - if result != testCase.want { - t.Errorf("asExpr(%v) = %v; want %v", testCase.expr, result, testCase.want) - } - } -} diff --git a/vendor/repo-infra/kazel/sourcerer.go b/vendor/repo-infra/kazel/sourcerer.go deleted file mode 100644 index 68ef83146b..0000000000 --- a/vendor/repo-infra/kazel/sourcerer.go +++ /dev/null @@ -1,109 +0,0 @@ -/* -Copyright 2017 The Kubernetes Authors. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "fmt" - "io/ioutil" - "path/filepath" - - "github.com/bazelbuild/buildtools/build" -) - -const ( - pkgSrcsTarget = "package-srcs" - allSrcsTarget = "all-srcs" -) - -// walkSource walks the source tree recursively from pkgPath, adding -// any BUILD files to v.newRules to be formatted. -// -// If AddSourcesRules is enabled in the kazel config, then we additionally add -// package-sources and recursive all-srcs filegroups rules to every BUILD file. -// -// Returns the list of children all-srcs targets that should be added to the -// all-srcs rule of the enclosing package. -func (v *Vendorer) walkSource(pkgPath string) ([]string, error) { - // clean pkgPath since we access v.newRules directly - pkgPath = filepath.Clean(pkgPath) - for _, r := range v.skippedPaths { - if r.MatchString(pkgPath) { - return nil, nil - } - } - files, err := ioutil.ReadDir(pkgPath) - if err != nil { - return nil, err - } - - // Find any children packages we need to include in an all-srcs rule. - var children []string - for _, f := range files { - if f.IsDir() { - c, err := v.walkSource(filepath.Join(pkgPath, f.Name())) - if err != nil { - return nil, err - } - children = append(children, c...) - } - } - - // This path is a package either if we've added rules or if a BUILD file already exists. - _, hasRules := v.newRules[pkgPath] - isPkg := hasRules - if !isPkg { - isPkg, _ = findBuildFile(pkgPath) - } - - if !isPkg { - // This directory isn't a package (doesn't contain a BUILD file), - // but there might be subdirectories that are packages, - // so pass that up to our parent. - return children, nil - } - - // Enforce formatting the BUILD file, even if we're not adding srcs rules - if !hasRules { - v.addRules(pkgPath, nil) - } - - if !v.cfg.AddSourcesRules { - return nil, nil - } - - pkgSrcsExpr := &build.LiteralExpr{Token: `glob(["**"])`} - if pkgPath == "." { - pkgSrcsExpr = &build.LiteralExpr{Token: `glob(["**"], exclude=["bazel-*/**", ".git/**"])`} - } - - v.addRules(pkgPath, []*build.Rule{ - newRule("filegroup", - pkgSrcsTarget, - map[string]build.Expr{ - "srcs": pkgSrcsExpr, - "visibility": asExpr([]string{"//visibility:private"}), - }), - newRule("filegroup", - allSrcsTarget, - map[string]build.Expr{ - "srcs": asExpr(append(children, fmt.Sprintf(":%s", pkgSrcsTarget))), - // TODO: should this be more restricted? - "visibility": asExpr([]string{"//visibility:public"}), - }), - }) - return []string{fmt.Sprintf("//%s:%s", pkgPath, allSrcsTarget)}, nil -} diff --git a/vendor/repo-infra/tools/BUILD.bazel b/vendor/repo-infra/tools/BUILD.bazel deleted file mode 100644 index 3fc616ea7e..0000000000 --- a/vendor/repo-infra/tools/BUILD.bazel +++ /dev/null @@ -1,65 +0,0 @@ -# Note: we don't currently generate the CROSSTOOL file at build time -# because it's needed by Bazel in the analysis phase, and running Go -# programs during the analysis phase is tricky. (Also, we'd like to -# avoid recompiling protoc all the time.) 
-# Instead, we check in a pregenerated CROSSTOOL file, but also add a -# test to verify that it is up-to-date. -# Once CROSSTOOL has migrated to Starlark rules, we can try to -# autogenerate this at build time instead. -genrule( - name = "gen_crosstool", - # This is a proto, not a bzl file, but close enough. - srcs = ["//verify/boilerplate:boilerplate.bzl.txt"], - outs = ["generated_CROSSTOOL.textpb"], - cmd = "$(location //tools/generate_crosstool) --boilerplate=$< --out=$@", - tools = ["//tools/generate_crosstool"], - visibility = ["//verify:__pkg__"], -) - -exports_files(["CROSSTOOL"]) - -[cc_toolchain( - name = "cc-gcc-" + cpu, - all_files = ":empty", - compiler_files = ":empty", - dwp_files = ":empty", - linker_files = ":empty", - objcopy_files = ":empty", - strip_files = ":empty", - toolchain_identifier = toolchain_identifier, -) for (cpu, toolchain_identifier) in [ - ("k8", "host"), - ("arm", "cross-arm-linux-gnueabihf"), - ("aarch64", "cross-aarch64-linux-gnu"), - ("powerpc64le", "cross-powerpc64le-linux-gnu"), - ("s390x", "cross-s390x-linux-gnu"), -]] - -cc_toolchain_suite( - name = "toolchain", - # Use both the bazely CPU names and goarchy CPU names - toolchains = { - "amd64|gcc": ":cc-gcc-k8", - "amd64": ":cc-gcc-k8", - "k8|gcc": ":cc-gcc-k8", - "k8": ":cc-gcc-k8", - "arm|gcc": ":cc-gcc-arm", - "arm": ":cc-gcc-arm", - "aarch64|gcc": ":cc-gcc-aarch64", - "aarch64": ":cc-gcc-aarch64", - "arm64|gcc": ":cc-gcc-aarch64", - "arm64": ":cc-gcc-aarch64", - "powerpc64le|gcc": ":cc-gcc-powerpc64le", - "powerpc64le": ":cc-gcc-powerpc64le", - "ppc64le|gcc": ":cc-gcc-powerpc64le", - "ppc64le": ":cc-gcc-powerpc64le", - "s390x|gcc": ":cc-gcc-s390x", - "s390x": ":cc-gcc-s390x", - }, - visibility = ["//visibility:public"], -) - -filegroup( - name = "empty", - srcs = [], -) diff --git a/vendor/repo-infra/tools/CROSSTOOL b/vendor/repo-infra/tools/CROSSTOOL deleted file mode 100755 index 84a2c5c724..0000000000 --- a/vendor/repo-infra/tools/CROSSTOOL +++ /dev/null @@ -1,512 +0,0 @@ -# Copyright YEAR The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# DO NOT EDIT -# This file contains the text format encoding of a -# com.google.devtools.build.lib.view.config.crosstool.CrosstoolRelease -# protocol buffer generated by generate_crosstool. 
- -major_version: "local" -minor_version: "" -toolchain: < - toolchain_identifier: "host" - host_system_name: "host" - target_system_name: "host" - target_cpu: "k8" - target_libc: "k8-local" - compiler: "gcc" - abi_version: "k8-local" - abi_libc_version: "k8-local" - tool_path: < - name: "ar" - path: "/usr/bin/ar" - > - tool_path: < - name: "ld" - path: "/usr/bin/ld" - > - tool_path: < - name: "cpp" - path: "/usr/bin/cpp" - > - tool_path: < - name: "dwp" - path: "/usr/bin/dwp" - > - tool_path: < - name: "gcc" - path: "/usr/bin/gcc" - > - tool_path: < - name: "gcov" - path: "/usr/bin/gcov" - > - tool_path: < - name: "ld" - path: "/usr/bin/ld" - > - tool_path: < - name: "nm" - path: "/usr/bin/nm" - > - tool_path: < - name: "objcopy" - path: "/usr/bin/objcopy" - > - tool_path: < - name: "objdump" - path: "/usr/bin/objdump" - > - tool_path: < - name: "strip" - path: "/usr/bin/strip" - > - supports_gold_linker: false - supports_start_end_lib: false - supports_interface_shared_objects: false - supports_incremental_linker: false - supports_normalizing_ar: false - supports_fission: false - needsPic: true - compiler_flag: "-U_FORTIFY_SOURCE" - compiler_flag: "-D_FORTIFY_SOURCE=1" - compiler_flag: "-fstack-protector" - compiler_flag: "-Wall" - compiler_flag: "-Wunused-but-set-parameter" - compiler_flag: "-Wno-free-nonheap-object" - compiler_flag: "-fno-omit-frame-pointer" - unfiltered_cxx_flag: "-no-canonical-prefixes" - unfiltered_cxx_flag: "-fno-canonical-system-headers" - unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" - unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" - linker_flag: "-Wl,-z,relro,-z,now" - linker_flag: "-no-canonical-prefixes" - linker_flag: "-pass-exit-codes" - objcopy_embed_flag: "-I" - objcopy_embed_flag: "binary" - compilation_mode_flags: < - mode: DBG - compiler_flag: "-g" - > - compilation_mode_flags: < - mode: OPT - compiler_flag: "-g0" - compiler_flag: "-O2" - compiler_flag: "-DNDEBUG" - compiler_flag: "-ffunction-sections" - compiler_flag: "-fdata-sections" - linker_flag: "-Wl,--gc-sections" - > - linking_mode_flags: < - mode: DYNAMIC - > - cxx_builtin_include_directory: "/usr/lib/gcc" - cxx_builtin_include_directory: "/usr/local/include" - cxx_builtin_include_directory: "/usr/include" - builtin_sysroot: "" -> -toolchain: < - toolchain_identifier: "cross-arm-linux-gnueabihf" - host_system_name: "host" - target_system_name: "cross-arm-linux-gnueabihf" - target_cpu: "arm" - target_libc: "arm-linux-gnueabihf" - compiler: "gcc" - abi_version: "arm-linux-gnueabihf" - abi_libc_version: "arm-linux-gnueabihf" - tool_path: < - name: "ar" - path: "/usr/bin/arm-linux-gnueabihf-ar" - > - tool_path: < - name: "ld" - path: "/usr/bin/arm-linux-gnueabihf-ld" - > - tool_path: < - name: "cpp" - path: "/usr/bin/arm-linux-gnueabihf-cpp" - > - tool_path: < - name: "dwp" - path: "/usr/bin/arm-linux-gnueabihf-dwp" - > - tool_path: < - name: "gcc" - path: "/usr/bin/arm-linux-gnueabihf-gcc" - > - tool_path: < - name: "gcov" - path: "/usr/bin/arm-linux-gnueabihf-gcov" - > - tool_path: < - name: "ld" - path: "/usr/bin/arm-linux-gnueabihf-ld" - > - tool_path: < - name: "nm" - path: "/usr/bin/arm-linux-gnueabihf-nm" - > - tool_path: < - name: "objcopy" - path: "/usr/bin/arm-linux-gnueabihf-objcopy" - > - tool_path: < - name: "objdump" - path: "/usr/bin/arm-linux-gnueabihf-objdump" - > - tool_path: < - name: "strip" - path: "/usr/bin/arm-linux-gnueabihf-strip" - > - supports_gold_linker: false - 
supports_start_end_lib: false - supports_interface_shared_objects: false - supports_incremental_linker: false - supports_normalizing_ar: false - supports_fission: false - needsPic: true - compiler_flag: "-U_FORTIFY_SOURCE" - compiler_flag: "-D_FORTIFY_SOURCE=1" - compiler_flag: "-fstack-protector" - compiler_flag: "-Wall" - compiler_flag: "-Wunused-but-set-parameter" - compiler_flag: "-Wno-free-nonheap-object" - compiler_flag: "-fno-omit-frame-pointer" - unfiltered_cxx_flag: "-no-canonical-prefixes" - unfiltered_cxx_flag: "-fno-canonical-system-headers" - unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" - unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" - linker_flag: "-Wl,-z,relro,-z,now" - linker_flag: "-no-canonical-prefixes" - linker_flag: "-pass-exit-codes" - objcopy_embed_flag: "-I" - objcopy_embed_flag: "binary" - compilation_mode_flags: < - mode: DBG - compiler_flag: "-g" - > - compilation_mode_flags: < - mode: OPT - compiler_flag: "-g0" - compiler_flag: "-O2" - compiler_flag: "-DNDEBUG" - compiler_flag: "-ffunction-sections" - compiler_flag: "-fdata-sections" - linker_flag: "-Wl,--gc-sections" - > - linking_mode_flags: < - mode: DYNAMIC - > - cxx_builtin_include_directory: "/usr/arm-linux-gnueabihf/include" - cxx_builtin_include_directory: "/usr/lib/gcc-cross/arm-linux-gnueabihf" - builtin_sysroot: "" -> -toolchain: < - toolchain_identifier: "cross-aarch64-linux-gnu" - host_system_name: "host" - target_system_name: "cross-aarch64-linux-gnu" - target_cpu: "aarch64" - target_libc: "aarch64-linux-gnu" - compiler: "gcc" - abi_version: "aarch64-linux-gnu" - abi_libc_version: "aarch64-linux-gnu" - tool_path: < - name: "ar" - path: "/usr/bin/aarch64-linux-gnu-ar" - > - tool_path: < - name: "ld" - path: "/usr/bin/aarch64-linux-gnu-ld" - > - tool_path: < - name: "cpp" - path: "/usr/bin/aarch64-linux-gnu-cpp" - > - tool_path: < - name: "dwp" - path: "/usr/bin/aarch64-linux-gnu-dwp" - > - tool_path: < - name: "gcc" - path: "/usr/bin/aarch64-linux-gnu-gcc" - > - tool_path: < - name: "gcov" - path: "/usr/bin/aarch64-linux-gnu-gcov" - > - tool_path: < - name: "ld" - path: "/usr/bin/aarch64-linux-gnu-ld" - > - tool_path: < - name: "nm" - path: "/usr/bin/aarch64-linux-gnu-nm" - > - tool_path: < - name: "objcopy" - path: "/usr/bin/aarch64-linux-gnu-objcopy" - > - tool_path: < - name: "objdump" - path: "/usr/bin/aarch64-linux-gnu-objdump" - > - tool_path: < - name: "strip" - path: "/usr/bin/aarch64-linux-gnu-strip" - > - supports_gold_linker: false - supports_start_end_lib: false - supports_interface_shared_objects: false - supports_incremental_linker: false - supports_normalizing_ar: false - supports_fission: false - needsPic: true - compiler_flag: "-U_FORTIFY_SOURCE" - compiler_flag: "-D_FORTIFY_SOURCE=1" - compiler_flag: "-fstack-protector" - compiler_flag: "-Wall" - compiler_flag: "-Wunused-but-set-parameter" - compiler_flag: "-Wno-free-nonheap-object" - compiler_flag: "-fno-omit-frame-pointer" - unfiltered_cxx_flag: "-no-canonical-prefixes" - unfiltered_cxx_flag: "-fno-canonical-system-headers" - unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" - unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" - linker_flag: "-Wl,-z,relro,-z,now" - linker_flag: "-no-canonical-prefixes" - linker_flag: "-pass-exit-codes" - objcopy_embed_flag: "-I" - objcopy_embed_flag: "binary" - compilation_mode_flags: < - 
mode: DBG - compiler_flag: "-g" - > - compilation_mode_flags: < - mode: OPT - compiler_flag: "-g0" - compiler_flag: "-O2" - compiler_flag: "-DNDEBUG" - compiler_flag: "-ffunction-sections" - compiler_flag: "-fdata-sections" - linker_flag: "-Wl,--gc-sections" - > - linking_mode_flags: < - mode: DYNAMIC - > - cxx_builtin_include_directory: "/usr/aarch64-linux-gnu/include" - cxx_builtin_include_directory: "/usr/lib/gcc-cross/aarch64-linux-gnu" - builtin_sysroot: "" -> -toolchain: < - toolchain_identifier: "cross-powerpc64le-linux-gnu" - host_system_name: "host" - target_system_name: "cross-powerpc64le-linux-gnu" - target_cpu: "powerpc64le" - target_libc: "powerpc64le-linux-gnu" - compiler: "gcc" - abi_version: "powerpc64le-linux-gnu" - abi_libc_version: "powerpc64le-linux-gnu" - tool_path: < - name: "ar" - path: "/usr/bin/powerpc64le-linux-gnu-ar" - > - tool_path: < - name: "ld" - path: "/usr/bin/powerpc64le-linux-gnu-ld" - > - tool_path: < - name: "cpp" - path: "/usr/bin/powerpc64le-linux-gnu-cpp" - > - tool_path: < - name: "dwp" - path: "/usr/bin/powerpc64le-linux-gnu-dwp" - > - tool_path: < - name: "gcc" - path: "/usr/bin/powerpc64le-linux-gnu-gcc" - > - tool_path: < - name: "gcov" - path: "/usr/bin/powerpc64le-linux-gnu-gcov" - > - tool_path: < - name: "ld" - path: "/usr/bin/powerpc64le-linux-gnu-ld" - > - tool_path: < - name: "nm" - path: "/usr/bin/powerpc64le-linux-gnu-nm" - > - tool_path: < - name: "objcopy" - path: "/usr/bin/powerpc64le-linux-gnu-objcopy" - > - tool_path: < - name: "objdump" - path: "/usr/bin/powerpc64le-linux-gnu-objdump" - > - tool_path: < - name: "strip" - path: "/usr/bin/powerpc64le-linux-gnu-strip" - > - supports_gold_linker: false - supports_start_end_lib: false - supports_interface_shared_objects: false - supports_incremental_linker: false - supports_normalizing_ar: false - supports_fission: false - needsPic: true - compiler_flag: "-U_FORTIFY_SOURCE" - compiler_flag: "-D_FORTIFY_SOURCE=1" - compiler_flag: "-fstack-protector" - compiler_flag: "-Wall" - compiler_flag: "-Wunused-but-set-parameter" - compiler_flag: "-Wno-free-nonheap-object" - compiler_flag: "-fno-omit-frame-pointer" - unfiltered_cxx_flag: "-no-canonical-prefixes" - unfiltered_cxx_flag: "-fno-canonical-system-headers" - unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" - unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" - linker_flag: "-Wl,-z,relro,-z,now" - linker_flag: "-no-canonical-prefixes" - linker_flag: "-pass-exit-codes" - objcopy_embed_flag: "-I" - objcopy_embed_flag: "binary" - compilation_mode_flags: < - mode: DBG - compiler_flag: "-g" - > - compilation_mode_flags: < - mode: OPT - compiler_flag: "-g0" - compiler_flag: "-O2" - compiler_flag: "-DNDEBUG" - compiler_flag: "-ffunction-sections" - compiler_flag: "-fdata-sections" - linker_flag: "-Wl,--gc-sections" - > - linking_mode_flags: < - mode: DYNAMIC - > - cxx_builtin_include_directory: "/usr/powerpc64le-linux-gnu/include" - cxx_builtin_include_directory: "/usr/lib/gcc-cross/powerpc64le-linux-gnu" - builtin_sysroot: "" -> -toolchain: < - toolchain_identifier: "cross-s390x-linux-gnu" - host_system_name: "host" - target_system_name: "cross-s390x-linux-gnu" - target_cpu: "s390x" - target_libc: "s390x-linux-gnu" - compiler: "gcc" - abi_version: "s390x-linux-gnu" - abi_libc_version: "s390x-linux-gnu" - tool_path: < - name: "ar" - path: "/usr/bin/s390x-linux-gnu-ar" - > - tool_path: < - name: "ld" - path: "/usr/bin/s390x-linux-gnu-ld" - > - 
tool_path: < - name: "cpp" - path: "/usr/bin/s390x-linux-gnu-cpp" - > - tool_path: < - name: "dwp" - path: "/usr/bin/s390x-linux-gnu-dwp" - > - tool_path: < - name: "gcc" - path: "/usr/bin/s390x-linux-gnu-gcc" - > - tool_path: < - name: "gcov" - path: "/usr/bin/s390x-linux-gnu-gcov" - > - tool_path: < - name: "ld" - path: "/usr/bin/s390x-linux-gnu-ld" - > - tool_path: < - name: "nm" - path: "/usr/bin/s390x-linux-gnu-nm" - > - tool_path: < - name: "objcopy" - path: "/usr/bin/s390x-linux-gnu-objcopy" - > - tool_path: < - name: "objdump" - path: "/usr/bin/s390x-linux-gnu-objdump" - > - tool_path: < - name: "strip" - path: "/usr/bin/s390x-linux-gnu-strip" - > - supports_gold_linker: false - supports_start_end_lib: false - supports_interface_shared_objects: false - supports_incremental_linker: false - supports_normalizing_ar: false - supports_fission: false - needsPic: true - compiler_flag: "-U_FORTIFY_SOURCE" - compiler_flag: "-D_FORTIFY_SOURCE=1" - compiler_flag: "-fstack-protector" - compiler_flag: "-Wall" - compiler_flag: "-Wunused-but-set-parameter" - compiler_flag: "-Wno-free-nonheap-object" - compiler_flag: "-fno-omit-frame-pointer" - unfiltered_cxx_flag: "-no-canonical-prefixes" - unfiltered_cxx_flag: "-fno-canonical-system-headers" - unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" - unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" - linker_flag: "-Wl,-z,relro,-z,now" - linker_flag: "-no-canonical-prefixes" - linker_flag: "-pass-exit-codes" - objcopy_embed_flag: "-I" - objcopy_embed_flag: "binary" - compilation_mode_flags: < - mode: DBG - compiler_flag: "-g" - > - compilation_mode_flags: < - mode: OPT - compiler_flag: "-g0" - compiler_flag: "-O2" - compiler_flag: "-DNDEBUG" - compiler_flag: "-ffunction-sections" - compiler_flag: "-fdata-sections" - linker_flag: "-Wl,--gc-sections" - > - linking_mode_flags: < - mode: DYNAMIC - > - cxx_builtin_include_directory: "/usr/s390x-linux-gnu/include" - cxx_builtin_include_directory: "/usr/lib/gcc-cross/s390x-linux-gnu" - builtin_sysroot: "" -> diff --git a/vendor/repo-infra/tools/build_tar/BUILD.bazel b/vendor/repo-infra/tools/build_tar/BUILD.bazel deleted file mode 100644 index 70611ae030..0000000000 --- a/vendor/repo-infra/tools/build_tar/BUILD.bazel +++ /dev/null @@ -1,18 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") - -go_library( - name = "go_default_library", - srcs = ["buildtar.go"], - importpath = "k8s.io/repo-infra/tools/build_tar", - visibility = ["//visibility:private"], - deps = [ - "//vendor/golang.org/x/build/pargzip:go_default_library", - "//vendor/k8s.io/klog:go_default_library", - ], -) - -go_binary( - name = "build_tar", - embed = [":go_default_library"], - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/tools/build_tar/buildtar.go b/vendor/repo-infra/tools/build_tar/buildtar.go deleted file mode 100644 index 1877b39342..0000000000 --- a/vendor/repo-infra/tools/build_tar/buildtar.go +++ /dev/null @@ -1,611 +0,0 @@ -/* -Copyright 2017 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// fast tar builder for bazel -package main - -import ( - "archive/tar" - "bufio" - "compress/bzip2" - "compress/gzip" - "flag" - "fmt" - "io" - "io/ioutil" - "os" - "path/filepath" - "strconv" - "strings" - "time" - - "golang.org/x/build/pargzip" - - "k8s.io/klog" -) - -func main() { - var ( - flagfile string - - output string - directory string - compression string - - files multiString - tars multiString - debs multiString - links multiString - - mode string - modes multiString - - owner string - owners multiString - ownerName string - ownerNames multiString - - mtime string - ) - - flag.StringVar(&flagfile, "flagfile", "", "Path to flagfile") - - flag.StringVar(&output, "output", "", "The output file, mandatory") - flag.StringVar(&directory, "directory", "", "Directory in which to store the file inside the layer") - flag.StringVar(&compression, "compression", "", "Compression (`gz` or `bz2`), default is none.") - - flag.Var(&files, "file", "A file to add to the layer") - flag.Var(&tars, "tar", "A tar file to add to the layer") - flag.Var(&debs, "deb", "A debian package to add to the layer") - flag.Var(&links, "link", "Add a symlink a inside the layer ponting to b if a:b is specified") - - flag.StringVar(&mode, "mode", "", "Force the mode on the added files (in octal).") - flag.Var(&modes, "modes", "Specific mode to apply to specific file (from the file argument), e.g., path/to/file=0455.") - - flag.StringVar(&owner, "owner", "0.0", "Specify the numeric default owner of all files, e.g., 0.0") - flag.Var(&owners, "owners", "Specify the numeric owners of individual files, e.g. path/to/file=0.0.") - flag.StringVar(&ownerName, "owner_name", "", "Specify the owner name of all files, e.g. root.root.") - flag.Var(&ownerNames, "owner_names", "Specify the owner names of individual files, e.g. path/to/file=root.root.") - - flag.StringVar(&mtime, "mtime", "", - "mtime to set on tar file entries. 
May be an integer (corresponding to epoch seconds) or the value \"portable\", which will use the value 2000-01-01, usable with non *nix OSes") - - flag.Set("logtostderr", "true") - - flag.Parse() - - if flagfile != "" { - b, err := ioutil.ReadFile(flagfile) - if err != nil { - klog.Fatalf("couldn't read flagfile: %v", err) - } - cmdline := strings.Split(string(b), "\n") - flag.CommandLine.Parse(cmdline) - } - - if output == "" { - klog.Fatalf("--output flag is required") - } - - parsedMtime, err := parseMtimeFlag(mtime) - if err != nil { - klog.Fatalf("invalid value for --mtime: %s", mtime) - } - - meta := newFileMeta(mode, modes, owner, owners, ownerName, ownerNames, parsedMtime) - - tf, err := newTarFile(output, directory, compression, meta) - if err != nil { - klog.Fatalf("couldn't build tar: %v", err) - } - defer tf.Close() - - for _, file := range files { - parts := strings.SplitN(file, "=", 2) - if len(parts) != 2 { - klog.Fatalf("bad parts length for file %q", file) - } - if err := tf.addFile(parts[0], parts[1]); err != nil { - klog.Fatalf("couldn't add file: %v", err) - } - } - - for _, tar := range tars { - if err := tf.addTar(tar); err != nil { - klog.Fatalf("couldn't add tar: %v", err) - } - } - - for _, deb := range debs { - if err := tf.addDeb(deb); err != nil { - klog.Fatalf("couldn't add deb: %v", err) - } - } - - for _, link := range links { - parts := strings.SplitN(link, ":", 2) - if len(parts) != 2 { - klog.Fatalf("bad parts length for link %q", link) - } - if err := tf.addLink(parts[0], parts[1]); err != nil { - klog.Fatalf("couldn't add link: %v", err) - } - } -} - -type tarFile struct { - directory string - - tw *tar.Writer - - meta fileMeta - dirsMade map[string]struct{} - filesMade map[string]struct{} - - closers []func() -} - -func newTarFile(output, directory, compression string, meta fileMeta) (*tarFile, error) { - var ( - w io.Writer - closers []func() - ) - f, err := os.Create(output) - if err != nil { - return nil, err - } - closers = append(closers, func() { - f.Close() - }) - w = f - - buf := bufio.NewWriter(w) - closers = append(closers, func() { buf.Flush() }) - w = buf - - switch compression { - case "": - case "gz": - gzw := pargzip.NewWriter(w) - closers = append(closers, func() { gzw.Close() }) - w = gzw - case "bz2", "xz": - return nil, fmt.Errorf("%q compression is not supported yet", compression) - default: - return nil, fmt.Errorf("unknown compression %q", compression) - } - - tw := tar.NewWriter(w) - closers = append(closers, func() { tw.Close() }) - - return &tarFile{ - directory: directory, - tw: tw, - closers: closers, - meta: meta, - dirsMade: map[string]struct{}{}, - filesMade: map[string]struct{}{}, - }, nil -} - -func (f *tarFile) addFile(file, dest string) error { - dest = strings.TrimLeft(dest, "/") - dest = filepath.Clean(dest) - - uid := f.meta.getUID(dest) - gid := f.meta.getGID(dest) - uname := f.meta.getUname(dest) - gname := f.meta.getGname(dest) - - dest = filepath.Join(strings.TrimLeft(f.directory, "/"), dest) - dest = filepath.Clean(dest) - - if ok := f.tryReservePath(dest); !ok { - klog.Warningf("Duplicate file in archive: %v, picking first occurence", dest) - return nil - } - - info, err := os.Stat(file) - if err != nil { - return err - } - - mode := f.meta.getMode(dest) - // If mode is unspecified, derive the mode from the file's mode. 
- if mode == 0 { - mode = os.FileMode(0644) - if info.Mode().Perm()&os.FileMode(0111) != 0 { - mode = os.FileMode(0755) - } - } - - header := tar.Header{ - Name: dest, - Mode: int64(mode), - Uid: uid, - Gid: gid, - Size: 0, - Uname: uname, - Gname: gname, - ModTime: f.meta.modTime, - } - - if err := f.makeDirs(header); err != nil { - return err - } - - switch { - case info.Mode()&os.ModeSymlink != 0: - return fmt.Errorf("addFile: didn't expect symlink: %s", file) - case info.Mode()&os.ModeNamedPipe != 0: - return fmt.Errorf("addFile: didn't expect named pipe: %s", file) - case info.Mode()&os.ModeSocket != 0: - return fmt.Errorf("addFile: didn't expect socket: %s", file) - case info.Mode()&os.ModeDevice != 0: - return fmt.Errorf("addFile: didn't expect device: %s", file) - case info.Mode()&os.ModeDir != 0: - header.Typeflag = tar.TypeDir - if err := f.tw.WriteHeader(&header); err != nil { - return err - } - default: - //regular file - header.Typeflag = tar.TypeReg - b, err := ioutil.ReadFile(file) - if err != nil { - return err - } - header.Size = int64(len(b)) - if err := f.tw.WriteHeader(&header); err != nil { - return err - } - if _, err := f.tw.Write(b); err != nil { - return err - } - } - return nil -} - -func (f *tarFile) addLink(symlink, target string) error { - if ok := f.tryReservePath(symlink); !ok { - klog.Warningf("Duplicate file in archive: %v, picking first occurence", symlink) - return nil - } - header := tar.Header{ - Name: symlink, - Typeflag: tar.TypeSymlink, - Linkname: target, - Mode: int64(0777), // symlinks should always have 0777 mode - ModTime: f.meta.modTime, - } - if err := f.makeDirs(header); err != nil { - return err - } - return f.tw.WriteHeader(&header) -} - -func (f *tarFile) addTar(toAdd string) error { - root := "" - if f.directory != "/" { - root = f.directory - } - - var r io.Reader - - file, err := os.Open(toAdd) - if err != nil { - return err - } - defer file.Close() - r = file - - r = bufio.NewReader(r) - - switch { - case strings.HasSuffix(toAdd, "gz"): - gzr, err := gzip.NewReader(r) - if err != nil { - return err - } - r = gzr - case strings.HasSuffix(toAdd, "bz2"): - bz2r := bzip2.NewReader(r) - r = bz2r - case strings.HasSuffix(toAdd, "xz"): - return fmt.Errorf("%q decompression is not supported yet", toAdd) - default: - } - - tr := tar.NewReader(r) - - for { - header, err := tr.Next() - if err == io.EOF { - break - } - if err != nil { - return err - } - header.Name = filepath.Join(root, header.Name) - if header.Typeflag == tar.TypeDir && !strings.HasSuffix(header.Name, "/") { - header.Name = header.Name + "/" - } else if ok := f.tryReservePath(header.Name); !ok { - klog.Warningf("Duplicate file in archive: %v, picking first occurence", header.Name) - continue - } - // Create root directories with same permissions if missing. - // makeDirs keeps track of which directories exist, - // so it's safe to duplicate this here. - if err = f.makeDirs(*header); err != nil { - return err - } - // If this is a directory, then makeDirs already created it, - // so skip to the next entry. - if header.Typeflag == tar.TypeDir { - continue - } - err = f.tw.WriteHeader(header) - if err != nil { - return err - } - if _, err = io.Copy(f.tw, tr); err != nil { - return err - } - } - return nil -} - -func (f *tarFile) addDeb(toAdd string) error { - return fmt.Errorf("addDeb unimplemented") -} - -func (f *tarFile) makeDirs(header tar.Header) error { - dirToMake := []string{} - dir := header.Name - for { - dir = filepath.Dir(dir) - if dir == "." 
|| dir == "/" { - break - } - dirToMake = append(dirToMake, dir) - } - for i := len(dirToMake) - 1; i >= 0; i-- { - dir := dirToMake[i] - if _, ok := f.dirsMade[dir]; ok { - continue - } - dh := header - // Add the x bit to directories if the read bit is set, - // and make sure all directories are at least user RWX. - dh.Mode = header.Mode | 0700 | ((0444 & header.Mode) >> 2) - dh.Typeflag = tar.TypeDir - dh.Name = dir + "/" - if err := f.tw.WriteHeader(&dh); err != nil { - return err - } - - f.dirsMade[dir] = struct{}{} - } - return nil -} - -func (f *tarFile) tryReservePath(path string) bool { - if _, ok := f.filesMade[path]; ok { - return false - } - if _, ok := f.dirsMade[path]; ok { - return false - } - f.filesMade[path] = struct{}{} - return true -} - -func (f *tarFile) Close() { - for i := len(f.closers) - 1; i >= 0; i-- { - f.closers[i]() - } -} - -// parseMtimeFlag matches the functionality of Bazel's python-based build_tar and archive modules -// for the --mtime flag. -// In particular: -// - if no value is provided, use the Unix epoch -// - if the string "portable" is provided, use a "deterministic date compatible with non *nix OSes" -// - if an integer is provided, interpret that as the number of seconds since Unix epoch -func parseMtimeFlag(input string) (time.Time, error) { - if input == "" { - return time.Unix(0, 0), nil - } else if input == "portable" { - // A deterministic time compatible with non *nix OSes. - // See also https://github.com/bazelbuild/bazel/issues/1299. - return time.Date(2000, 1, 1, 0, 0, 0, 0, time.UTC), nil - } - seconds, err := strconv.ParseInt(input, 10, 64) - if err != nil { - return time.Unix(0, 0), err - } - return time.Unix(seconds, 0), nil -} - -func newFileMeta( - mode string, - modes multiString, - owner string, - owners multiString, - ownerName string, - ownerNames multiString, - modTime time.Time, -) fileMeta { - meta := fileMeta{ - modTime: modTime, - } - - if mode != "" { - i, err := strconv.ParseUint(mode, 8, 32) - if err != nil { - klog.Fatalf("couldn't parse mode: %v", mode) - } - meta.defaultMode = os.FileMode(i) - } - - meta.modeMap = map[string]os.FileMode{} - for _, filemode := range modes { - parts := strings.SplitN(filemode, "=", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q", filemode) - } - if parts[0][0] == '/' { - parts[0] = parts[0][1:] - } - i, err := strconv.ParseUint(parts[1], 8, 32) - if err != nil { - klog.Fatalf("couldn't parse mode: %v", filemode) - } - meta.modeMap[parts[0]] = os.FileMode(i) - } - - if ownerName != "" { - parts := strings.SplitN(ownerName, ".", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q", ownerName) - } - meta.defaultUname = parts[0] - meta.defaultGname = parts[1] - } - - meta.unameMap = map[string]string{} - meta.gnameMap = map[string]string{} - for _, name := range ownerNames { - parts := strings.SplitN(name, "=", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q %v", name, parts) - } - filename, ownername := parts[0], parts[1] - - parts = strings.SplitN(ownername, ".", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q", name) - } - uname, gname := parts[0], parts[1] - - meta.unameMap[filename] = uname - meta.gnameMap[filename] = gname - } - - if owner != "" { - parts := strings.SplitN(owner, ".", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q", owner) - } - uid, err := strconv.Atoi(parts[0]) - if err != nil { - klog.Fatalf("could not parse uid: %q", parts[0]) - } - gid, err := strconv.Atoi(parts[1]) - if 
err != nil { - klog.Fatalf("could not parse gid: %q", parts[1]) - } - meta.defaultUID = uid - meta.defaultGID = gid - - } - - meta.uidMap = map[string]int{} - meta.gidMap = map[string]int{} - for _, owner := range owners { - parts := strings.SplitN(owner, "=", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q", owner) - } - filename, owner := parts[0], parts[1] - - parts = strings.SplitN(parts[1], ".", 2) - if len(parts) != 2 { - klog.Fatalf("expected two parts to %q", owner) - } - uid, err := strconv.Atoi(parts[0]) - if err != nil { - klog.Fatalf("could not parse uid: %q", parts[0]) - } - gid, err := strconv.Atoi(parts[1]) - if err != nil { - klog.Fatalf("could not parse gid: %q", parts[1]) - } - meta.uidMap[filename] = uid - meta.gidMap[filename] = gid - } - - return meta -} - -type fileMeta struct { - defaultGID, defaultUID int - gidMap, uidMap map[string]int - - defaultGname, defaultUname string - gnameMap, unameMap map[string]string - - defaultMode os.FileMode - modeMap map[string]os.FileMode - - modTime time.Time -} - -func (f *fileMeta) getGID(fname string) int { - if id, ok := f.gidMap[fname]; ok { - return id - } - return f.defaultGID -} - -func (f *fileMeta) getUID(fname string) int { - if id, ok := f.uidMap[fname]; ok { - return id - } - return f.defaultUID -} - -func (f *fileMeta) getGname(fname string) string { - if name, ok := f.gnameMap[fname]; ok { - return name - } - return f.defaultGname -} - -func (f *fileMeta) getUname(fname string) string { - if name, ok := f.unameMap[fname]; ok { - return name - } - return f.defaultUname -} - -func (f *fileMeta) getMode(fname string) os.FileMode { - if mode, ok := f.modeMap[fname]; ok { - return mode - } - return f.defaultMode -} - -type multiString []string - -func (ms *multiString) String() string { - return strings.Join(*ms, ",") -} - -func (ms *multiString) Set(v string) error { - *ms = append(*ms, v) - return nil -} diff --git a/vendor/repo-infra/tools/generate_crosstool/BUILD.bazel b/vendor/repo-infra/tools/generate_crosstool/BUILD.bazel deleted file mode 100644 index 458f0d8dc8..0000000000 --- a/vendor/repo-infra/tools/generate_crosstool/BUILD.bazel +++ /dev/null @@ -1,26 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") -load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") - -go_proto_library( - name = "crosstool_config_go_proto", - importpath = "github.com/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto", - proto = "@io_bazel//src/main/protobuf:crosstool_config_proto", - visibility = ["//verify:__pkg__"], -) - -go_library( - name = "go_default_library", - srcs = ["main.go"], - importpath = "k8s.io/repo-infra/tools/generate_crosstool", - visibility = ["//visibility:private"], - deps = [ - ":crosstool_config_go_proto", - "@com_github_golang_protobuf//proto:go_default_library", - ], -) - -go_binary( - name = "generate_crosstool", - embed = [":go_default_library"], - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/tools/generate_crosstool/main.go b/vendor/repo-infra/tools/generate_crosstool/main.go deleted file mode 100644 index 4630376e8a..0000000000 --- a/vendor/repo-infra/tools/generate_crosstool/main.go +++ /dev/null @@ -1,238 +0,0 @@ -/* -Copyright 2019 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "flag" - "fmt" - "io" - "log" - "os" - - crosstoolpb "github.com/bazelbuild/bazel/src/main/protobuf/crosstool_config_go_proto" - "github.com/golang/protobuf/proto" -) - -var ( - out = flag.String("out", "", "filename for CROSSTOOL text proto to write") - boilerplate = flag.String("boilerplate", "", "file containing boilerplate header") - - // The common toolchain fields shared across all targeted platforms. - // This was auto-generated by Bazel in a docker container with gcc installed, - // then manually updated to remove unnecessary fields and override others where needed. - baseToolchain = ` - # These are required but will be overridden - toolchain_identifier: "" - target_system_name: "" - target_cpu: "" - target_libc: "" - compiler: "" - abi_version: "" - abi_libc_version: "" - - builtin_sysroot: "" - host_system_name: "host" - needsPic: true - supports_gold_linker: false - supports_incremental_linker: false - supports_fission: false - supports_interface_shared_objects: false - supports_normalizing_ar: false - supports_start_end_lib: false - - objcopy_embed_flag: "-I" - objcopy_embed_flag: "binary" - - # Anticipated future default. - unfiltered_cxx_flag: "-no-canonical-prefixes" - unfiltered_cxx_flag: "-fno-canonical-system-headers" - - # Make C++ compilation deterministic. Use linkstamping instead of these - # compiler symbols. - unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" - unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" - unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" - - # Security hardening on by default. - # Conservative choice; -D_FORTIFY_SOURCE=2 may be unsafe in some cases. - # We need to undef it before redefining it as some distributions now have - # it enabled by default. - compiler_flag: "-U_FORTIFY_SOURCE" - compiler_flag: "-D_FORTIFY_SOURCE=1" - compiler_flag: "-fstack-protector" - linker_flag: "-Wl,-z,relro,-z,now" - - # All warnings are enabled. Maybe enable -Werror as well? - compiler_flag: "-Wall" - # Enable a few more warnings that aren't part of -Wall. - compiler_flag: "-Wunused-but-set-parameter" - # But disable some that are problematic. - compiler_flag: "-Wno-free-nonheap-object" # has false positives - - # Keep stack frames for debugging, even in opt mode. - compiler_flag: "-fno-omit-frame-pointer" - - # Anticipated future default. - linker_flag: "-no-canonical-prefixes" - # Have gcc return the exit code from ld. - linker_flag: "-pass-exit-codes" - - compilation_mode_flags { - mode: DBG - # Enable debug symbols. - compiler_flag: "-g" - } - compilation_mode_flags { - mode: OPT - - # No debug symbols. - # Maybe we should enable https://gcc.gnu.org/wiki/DebugFission for opt or - # even generally? However, that can't happen here, as it requires special - # handling in Bazel. - compiler_flag: "-g0" - - # Conservative choice for -O - # -O3 can increase binary size and even slow down the resulting binaries. - # Profile first and / or use FDO if you need better performance than this. 
- compiler_flag: "-O2" - - # Disable assertions - compiler_flag: "-DNDEBUG" - - # Removal of unused code and data at link time (can this increase binary size in some cases?). - compiler_flag: "-ffunction-sections" - compiler_flag: "-fdata-sections" - linker_flag: "-Wl,--gc-sections" - } - linking_mode_flags { mode: DYNAMIC } -` -) - -func addToolchain(cpu, os string, cross bool) (*crosstoolpb.CToolchain, error) { - toolchain := &crosstoolpb.CToolchain{} - if err := proto.UnmarshalText(baseToolchain, toolchain); err != nil { - return nil, err - } - - var system string - if cross { - system = fmt.Sprintf("cross-%s-%s", cpu, os) - } else { - system = "host" - cpu = "k8" - } - compiler := "gcc" - libc := fmt.Sprintf("%s-%s", cpu, os) - toolchain.Compiler = proto.String(compiler) - toolchain.TargetLibc = proto.String(libc) - toolchain.TargetCpu = proto.String(cpu) - toolchain.TargetSystemName = proto.String(system) - toolchain.ToolchainIdentifier = proto.String(system) - toolchain.AbiVersion = proto.String(libc) - toolchain.AbiLibcVersion = proto.String(libc) - - tools := []string{ - "ar", "ld", "cpp", "dwp", "gcc", "gcov", "ld", - "nm", "objcopy", "objdump", "strip", - } - for _, tool := range tools { - var path string - if cross { - path = fmt.Sprintf("/usr/bin/%s-%s", libc, tool) - } else { - path = fmt.Sprintf("/usr/bin/%s", tool) - } - toolchain.ToolPath = append(toolchain.ToolPath, - &crosstoolpb.ToolPath{ - Name: proto.String(tool), - Path: proto.String(path), - }) - } - - if cross { - toolchain.CxxBuiltinIncludeDirectory = append( - toolchain.CxxBuiltinIncludeDirectory, - fmt.Sprintf("/usr/%s/include", libc), - fmt.Sprintf("/usr/lib/gcc-cross/%s", libc), - ) - } else { - toolchain.CxxBuiltinIncludeDirectory = append( - toolchain.CxxBuiltinIncludeDirectory, - "/usr/lib/gcc", - "/usr/local/include", - "/usr/include") - } - - return toolchain, nil -} - -func main() { - flag.Parse() - if *out == "" { - log.Fatalf("--out must be provided") - } - - crosstool := &crosstoolpb.CrosstoolRelease{ - MajorVersion: proto.String("local"), - MinorVersion: proto.String(""), - } - targets := []struct { - cpu string - libc string - cross bool - }{ - {"k8", "local", false}, - {"arm", "linux-gnueabihf", true}, - {"aarch64", "linux-gnu", true}, - {"powerpc64le", "linux-gnu", true}, - {"s390x", "linux-gnu", true}, - } - for _, t := range targets { - toolchain, err := addToolchain(t.cpu, t.libc, t.cross) - if err != nil { - log.Fatalf("error creating toolchain for target %v: %q", t, err) - } - crosstool.Toolchain = append(crosstool.Toolchain, toolchain) - - } - - f, err := os.Create(*out) - if err != nil { - log.Fatalf("failed to open %q for writing: %q", *out, err) - } - - if *boilerplate != "" { - bp, err := os.Open(*boilerplate) - if err != nil { - log.Fatalf("failed to open %q for reading: %q", *boilerplate, err) - } - defer bp.Close() - if _, err := io.Copy(f, bp); err != nil { - log.Fatalf("failed copying boilerplate: %q", err) - } - } - - fmt.Fprintf(f, `# DO NOT EDIT -# This file contains the text format encoding of a -# %s -# protocol buffer generated by generate_crosstool. 
- -`, - proto.MessageName(crosstool)) - proto.MarshalText(f, crosstool) -} diff --git a/vendor/repo-infra/vendor/BUILD.bazel b/vendor/repo-infra/vendor/BUILD.bazel deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/AUTHORS b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/AUTHORS deleted file mode 100644 index 5abe237dae..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/AUTHORS +++ /dev/null @@ -1,18 +0,0 @@ -# This is the official list of authors for copyright purposes. -# Names should be added to this file as: -# Name or Organization -# The email address is not required for organizations. - -Andy Hochhaus -Antoine Pelisse -GinFungYJF <645116215@qq.com> -Google Inc. -Improbable Worlds Ltd -Jeff Hodges -John Millikin -Melinda Lu -Peter McAlpine -RS -Rodrigo Queiro -Tom Payne -Yuki Yugui Sonoda diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/CONTRIBUTORS b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/CONTRIBUTORS deleted file mode 100644 index d4ff1901b8..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/CONTRIBUTORS +++ /dev/null @@ -1,29 +0,0 @@ -# People who have agreed to one of the CLAs and can contribute patches. -# The AUTHORS file lists the copyright holders; this file -# lists people. For example, Google employees are listed here -# but not in AUTHORS, because Google holds the copyright. -# -# https://developers.google.com/open-source/cla/individual -# https://developers.google.com/open-source/cla/corporate -# -# Names should be added to this file as: -# Name - -Ainsley Escorce-Jones -Andy Hochhaus -Antoine Pelisse -GinFungYJF <645116215@qq.com> -Ian Cottrell -Jay Conrod -Jeff Grafton -Jeff Hodges -John Millikin -Kristina -Melinda Lu -Paul Bethe -Peter McAlpine -Rodrigo Queiro -RS -Stefan Sakalik -Tom Payne -Yuki Yugui Sonoda diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/LICENSE b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/BUILD.bazel deleted file mode 100644 index d6f01fca58..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/BUILD.bazel +++ /dev/null @@ -1,39 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "diff.go", - "fix.go", - "fix-update.go", - "gazelle.go", - "langs.go", - "print.go", - "update-repos.go", - "version.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle", - importpath = "github.com/bazelbuild/bazel-gazelle/cmd/gazelle", - visibility = ["//visibility:private"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/config:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/flag:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/internal/version:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/language:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/language/go:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/language/proto:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/merger:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/repo:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/resolve:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/walk:go_default_library", - "//vendor/github.com/pmezard/go-difflib/difflib:go_default_library", - ], -) - -go_binary( - name = "gazelle", - embed = [":go_default_library"], - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/diff.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/diff.go deleted file mode 100644 index 8be7c3341e..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/diff.go +++ /dev/null @@ -1,83 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package main - -import ( - "fmt" - "io" - "io/ioutil" - "os" - "path/filepath" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/rule" - "github.com/pmezard/go-difflib/difflib" -) - -func diffFile(c *config.Config, f *rule.File) error { - rel, err := filepath.Rel(c.RepoRoot, f.Path) - if err != nil { - return fmt.Errorf("error getting old path for file %q: %v", f.Path, err) - } - rel = filepath.ToSlash(rel) - - date := "1970-01-01 00:00:00.000000000 +0000" - diff := difflib.UnifiedDiff{ - Context: 3, - FromDate: date, - ToDate: date, - } - - if oldContent, err := ioutil.ReadFile(f.Path); err != nil && !os.IsNotExist(err) { - return fmt.Errorf("error reading original file: %v", err) - } else if err != nil { - diff.FromFile = "/dev/null" - } else if err == nil { - diff.A = difflib.SplitLines(string(oldContent)) - if c.ReadBuildFilesDir == "" { - path, err := filepath.Rel(c.RepoRoot, f.Path) - if err != nil { - return fmt.Errorf("error getting old path for file %q: %v", f.Path, err) - } - diff.FromFile = filepath.ToSlash(path) - } else { - diff.FromFile = f.Path - } - } - - newContent := f.Format() - diff.B = difflib.SplitLines(string(newContent)) - outPath := findOutputPath(c, f) - if c.WriteBuildFilesDir == "" { - path, err := filepath.Rel(c.RepoRoot, f.Path) - if err != nil { - return fmt.Errorf("error getting new path for file %q: %v", f.Path, err) - } - diff.ToFile = filepath.ToSlash(path) - } else { - diff.ToFile = outPath - } - - uc := getUpdateConfig(c) - var out io.Writer = os.Stdout - if uc.patchPath != "" { - out = &uc.patchBuffer - } - if err := difflib.WriteUnifiedDiff(out, diff); err != nil { - return fmt.Errorf("error diffing %s: %v", f.Path, err) - } - return nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix-update.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix-update.go deleted file mode 100644 index 8d136ccc2d..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix-update.go +++ /dev/null @@ -1,449 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "bytes" - "flag" - "fmt" - "io/ioutil" - "log" - "os" - "path/filepath" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - gzflag "github.com/bazelbuild/bazel-gazelle/flag" - "github.com/bazelbuild/bazel-gazelle/label" - "github.com/bazelbuild/bazel-gazelle/language" - "github.com/bazelbuild/bazel-gazelle/merger" - "github.com/bazelbuild/bazel-gazelle/repo" - "github.com/bazelbuild/bazel-gazelle/resolve" - "github.com/bazelbuild/bazel-gazelle/rule" - "github.com/bazelbuild/bazel-gazelle/walk" -) - -// updateConfig holds configuration information needed to run the fix and -// update commands. This includes everything in config.Config, but it also -// includes some additional fields that aren't relevant to other packages. 
-type updateConfig struct { - dirs []string - emit emitFunc - repos []repo.Repo - useIndex bool - walkMode walk.Mode - patchPath string - patchBuffer bytes.Buffer -} - -type emitFunc func(c *config.Config, f *rule.File) error - -var modeFromName = map[string]emitFunc{ - "print": printFile, - "fix": fixFile, - "diff": diffFile, -} - -const updateName = "_update" - -func getUpdateConfig(c *config.Config) *updateConfig { - return c.Exts[updateName].(*updateConfig) -} - -type updateConfigurer struct { - mode string - recursive bool -} - -func (ucr *updateConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) { - uc := &updateConfig{} - c.Exts[updateName] = uc - - c.ShouldFix = cmd == "fix" - - fs.StringVar(&ucr.mode, "mode", "fix", "print: prints all of the updated BUILD files\n\tfix: rewrites all of the BUILD files in place\n\tdiff: computes the rewrite but then just does a diff") - fs.BoolVar(&uc.useIndex, "index", true, "when true, gazelle will build an index of libraries in the workspace for dependency resolution") - fs.BoolVar(&ucr.recursive, "r", true, "when true, gazelle will update subdirectories recursively") - fs.StringVar(&uc.patchPath, "patch", "", "when set with -mode=diff, gazelle will write to a file instead of stdout") -} - -func (ucr *updateConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error { - uc := getUpdateConfig(c) - - var ok bool - uc.emit, ok = modeFromName[ucr.mode] - if !ok { - return fmt.Errorf("unrecognized emit mode: %q", ucr.mode) - } - if uc.patchPath != "" && ucr.mode != "diff" { - return fmt.Errorf("-patch set but -mode is %s, not diff", ucr.mode) - } - - dirs := fs.Args() - if len(dirs) == 0 { - dirs = []string{"."} - } - uc.dirs = make([]string, len(dirs)) - for i := range dirs { - dir, err := filepath.Abs(dirs[i]) - if err != nil { - return fmt.Errorf("%s: failed to find absolute path: %v", dirs[i], err) - } - dir, err = filepath.EvalSymlinks(dir) - if err != nil { - return fmt.Errorf("%s: failed to resolve symlinks: %v", dirs[i], err) - } - if !isDescendingDir(dir, c.RepoRoot) { - return fmt.Errorf("dir %q is not a subdirectory of repo root %q", dir, c.RepoRoot) - } - uc.dirs[i] = dir - } - - if ucr.recursive { - uc.walkMode = walk.VisitAllUpdateSubdirsMode - } else if uc.useIndex { - uc.walkMode = walk.VisitAllUpdateDirsMode - } else { - uc.walkMode = walk.UpdateDirsMode - } - - return nil -} - -func (ucr *updateConfigurer) KnownDirectives() []string { return nil } - -func (ucr *updateConfigurer) Configure(c *config.Config, rel string, f *rule.File) {} - -// visitRecord stores information about about a directory visited with -// packages.Walk. -type visitRecord struct { - // pkgRel is the slash-separated path to the visited directory, relative to - // the repository root. "" for the repository root itself. - pkgRel string - - // rules is a list of generated Go rules. - rules []*rule.Rule - - // imports contains opaque import information for each rule in rules. - imports []interface{} - - // empty is a list of empty Go rules that may be deleted. - empty []*rule.Rule - - // file is the build file being processed. 
- file *rule.File -} - -type byPkgRel []visitRecord - -func (vs byPkgRel) Len() int { return len(vs) } -func (vs byPkgRel) Less(i, j int) bool { return vs[i].pkgRel < vs[j].pkgRel } -func (vs byPkgRel) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] } - -var genericLoads = []rule.LoadInfo{ - { - Name: "@bazel_gazelle//:def.bzl", - Symbols: []string{"gazelle"}, - }, -} - -func runFixUpdate(cmd command, args []string) error { - cexts := make([]config.Configurer, 0, len(languages)+3) - cexts = append(cexts, - &config.CommonConfigurer{}, - &updateConfigurer{}, - &walk.Configurer{}, - &resolve.Configurer{}) - kindToResolver := make(map[string]resolve.Resolver) - kinds := make(map[string]rule.KindInfo) - loads := genericLoads - for _, lang := range languages { - cexts = append(cexts, lang) - for kind, info := range lang.Kinds() { - kindToResolver[kind] = lang - kinds[kind] = info - } - loads = append(loads, lang.Loads()...) - } - ruleIndex := resolve.NewRuleIndex(kindToResolver) - - c, err := newFixUpdateConfiguration(cmd, args, cexts, loads) - if err != nil { - return err - } - - if cmd == fixCmd { - // Only check the version when "fix" is run. Generated build files - // frequently work with older version of rules_go, and we don't want to - // nag too much since there's no way to disable this warning. - checkRulesGoVersion(c.RepoRoot) - } - - // Visit all directories in the repository. - var visits []visitRecord - uc := getUpdateConfig(c) - walk.Walk(c, cexts, uc.dirs, uc.walkMode, func(dir, rel string, c *config.Config, update bool, f *rule.File, subdirs, regularFiles, genFiles []string) { - // If this file is ignored or if Gazelle was not asked to update this - // directory, just index the build file and move on. - if !update { - if uc.useIndex && f != nil { - for _, r := range f.Rules { - ruleIndex.AddRule(c, r, f) - } - } - return - } - - // Fix any problems in the file. - if f != nil { - for _, l := range languages { - l.Fix(c, f) - } - } - - // Generate rules. - var empty, gen []*rule.Rule - var imports []interface{} - for _, l := range languages { - res := l.GenerateRules(language.GenerateArgs{ - Config: c, - Dir: dir, - Rel: rel, - File: f, - Subdirs: subdirs, - RegularFiles: regularFiles, - GenFiles: genFiles, - OtherEmpty: empty, - OtherGen: gen}) - if len(res.Gen) != len(res.Imports) { - log.Panicf("%s: language %s generated %d rules but returned %d imports", rel, l.Name(), len(res.Gen), len(res.Imports)) - } - empty = append(empty, res.Empty...) - gen = append(gen, res.Gen...) - imports = append(imports, res.Imports...) - } - if f == nil && len(gen) == 0 { - return - } - - // Insert or merge rules into the build file. - if f == nil { - f = rule.EmptyFile(filepath.Join(dir, c.DefaultBuildFileName()), rel) - for _, r := range gen { - r.Insert(f) - } - } else { - merger.MergeFile(f, empty, gen, merger.PreResolve, kinds) - } - visits = append(visits, visitRecord{ - pkgRel: rel, - rules: gen, - imports: imports, - empty: empty, - file: f, - }) - - // Add library rules to the dependency resolution table. - if uc.useIndex { - for _, r := range f.Rules { - ruleIndex.AddRule(c, r, f) - } - } - }) - - // Finish building the index for dependency resolution. - ruleIndex.Finish() - - // Resolve dependencies. 
- rc := repo.NewRemoteCache(uc.repos) - for _, v := range visits { - for i, r := range v.rules { - from := label.New(c.RepoName, v.pkgRel, r.Name()) - kindToResolver[r.Kind()].Resolve(c, ruleIndex, rc, r, v.imports[i], from) - } - merger.MergeFile(v.file, v.empty, v.rules, merger.PostResolve, kinds) - } - - // Emit merged files. - for _, v := range visits { - merger.FixLoads(v.file, loads) - if err := uc.emit(c, v.file); err != nil { - log.Print(err) - } - } - if uc.patchPath != "" { - if err := ioutil.WriteFile(uc.patchPath, uc.patchBuffer.Bytes(), 0666); err != nil { - return err - } - } - - return nil -} - -func newFixUpdateConfiguration(cmd command, args []string, cexts []config.Configurer, loads []rule.LoadInfo) (*config.Config, error) { - c := config.New() - - fs := flag.NewFlagSet("gazelle", flag.ContinueOnError) - // Flag will call this on any parse error. Don't print usage unless - // -h or -help were passed explicitly. - fs.Usage = func() {} - - var knownImports []string - fs.Var(&gzflag.MultiFlag{Values: &knownImports}, "known_import", "import path for which external resolution is skipped (can specify multiple times)") - - for _, cext := range cexts { - cext.RegisterFlags(fs, cmd.String(), c) - } - - if err := fs.Parse(args); err != nil { - if err == flag.ErrHelp { - fixUpdateUsage(fs) - return nil, err - } - // flag already prints the error; don't print it again. - log.Fatal("Try -help for more information.") - } - - for _, cext := range cexts { - if err := cext.CheckFlags(fs, c); err != nil { - return nil, err - } - } - - uc := getUpdateConfig(c) - workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE") - if workspace, err := rule.LoadFile(workspacePath, ""); err != nil { - if !os.IsNotExist(err) { - return nil, err - } - } else { - if err := fixWorkspace(c, workspace, loads); err != nil { - return nil, err - } - c.RepoName = findWorkspaceName(workspace) - uc.repos = repo.ListRepositories(workspace) - } - repoPrefixes := make(map[string]bool) - for _, r := range uc.repos { - repoPrefixes[r.GoPrefix] = true - } - for _, imp := range knownImports { - if repoPrefixes[imp] { - continue - } - repo := repo.Repo{ - Name: label.ImportPathToBazelRepoName(imp), - GoPrefix: imp, - } - uc.repos = append(uc.repos, repo) - } - - return c, nil -} - -func fixUpdateUsage(fs *flag.FlagSet) { - fmt.Fprint(os.Stderr, `usage: gazelle [fix|update] [flags...] [package-dirs...] - -The update command creates new build files and update existing BUILD files -when needed. - -The fix command also creates and updates build files, and in addition, it may -make potentially breaking updates to usage of rules. For example, it may -delete obsolete rules or rename existing rules. - -There are several output modes which can be selected with the -mode flag. The -output mode determines what Gazelle does with updated BUILD files. - - fix (default) - write updated BUILD files back to disk. - print - print updated BUILD files to stdout. - diff - diff updated BUILD files against existing files in unified format. - -Gazelle accepts a list of paths to Go package directories to process (defaults -to the working directory if none are given). It recursively traverses -subdirectories. All directories must be under the directory specified by --repo_root; if -repo_root is not given, this is the directory containing the -WORKSPACE file. 
- -FLAGS: - -`) - fs.PrintDefaults() -} - -func fixWorkspace(c *config.Config, workspace *rule.File, loads []rule.LoadInfo) error { - uc := getUpdateConfig(c) - if !c.ShouldFix { - return nil - } - shouldFix := false - for _, d := range uc.dirs { - if d == c.RepoRoot { - shouldFix = true - } - } - if !shouldFix { - return nil - } - - merger.FixWorkspace(workspace) - merger.FixLoads(workspace, loads) - if err := merger.CheckGazelleLoaded(workspace); err != nil { - return err - } - return uc.emit(c, workspace) -} - -func findWorkspaceName(f *rule.File) string { - for _, r := range f.Rules { - if r.Kind() == "workspace" { - return r.Name() - } - } - return "" -} - -func isDescendingDir(dir, root string) bool { - rel, err := filepath.Rel(root, dir) - if err != nil { - return false - } - if rel == "." { - return true - } - return !strings.HasPrefix(rel, "..") -} - -func findOutputPath(c *config.Config, f *rule.File) string { - if c.ReadBuildFilesDir == "" && c.WriteBuildFilesDir == "" { - return f.Path - } - baseDir := c.WriteBuildFilesDir - if c.WriteBuildFilesDir == "" { - baseDir = c.RepoRoot - } - outputDir := filepath.Join(baseDir, filepath.FromSlash(f.Pkg)) - defaultOutputPath := filepath.Join(outputDir, c.DefaultBuildFileName()) - files, err := ioutil.ReadDir(outputDir) - if err != nil { - // Ignore error. Directory probably doesn't exist. - return defaultOutputPath - } - outputPath := rule.MatchBuildFileName(outputDir, c.ValidBuildFileNames, files) - if outputPath == "" { - return defaultOutputPath - } - return outputPath -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix.go deleted file mode 100644 index 2b67bdd56e..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix.go +++ /dev/null @@ -1,33 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "io/ioutil" - "os" - "path/filepath" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func fixFile(c *config.Config, f *rule.File) error { - outPath := findOutputPath(c, f) - if err := os.MkdirAll(filepath.Dir(outPath), 0777); err != nil { - return err - } - return ioutil.WriteFile(outPath, f.Format(), 0666) -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/gazelle.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/gazelle.go deleted file mode 100644 index 5248c26f26..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/gazelle.go +++ /dev/null @@ -1,120 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Command gazelle is a BUILD file generator for Go projects. -// See "gazelle --help" for more details. -package main - -import ( - "flag" - "fmt" - "log" - "os" -) - -type command int - -const ( - updateCmd command = iota - fixCmd - updateReposCmd - helpCmd -) - -var commandFromName = map[string]command{ - "fix": fixCmd, - "help": helpCmd, - "update": updateCmd, - "update-repos": updateReposCmd, -} - -var nameFromCommand = []string{ - // keep in sync with definition above - "update", - "fix", - "update-repos", - "help", -} - -func (cmd command) String() string { - return nameFromCommand[cmd] -} - -func main() { - log.SetPrefix("gazelle: ") - log.SetFlags(0) // don't print timestamps - - if err := run(os.Args[1:]); err != nil && err != flag.ErrHelp { - log.Fatal(err) - } -} - -func run(args []string) error { - cmd := updateCmd - if len(args) == 1 && (args[0] == "-h" || args[0] == "-help" || args[0] == "--help") { - cmd = helpCmd - } else if len(args) > 0 { - c, ok := commandFromName[args[0]] - if ok { - cmd = c - args = args[1:] - } - } - - switch cmd { - case fixCmd, updateCmd: - return runFixUpdate(cmd, args) - case helpCmd: - return help() - case updateReposCmd: - return updateRepos(args) - default: - log.Panicf("unknown command: %v", cmd) - } - return nil -} - -func help() error { - fmt.Fprint(os.Stderr, `usage: gazelle [args...] - -Gazelle is a BUILD file generator for Go projects. It can create new BUILD files -for a project that follows "go build" conventions, and it can update BUILD files -if they already exist. It can be invoked directly in a project workspace, or -it can be run on an external dependency during the build as part of the -go_repository rule. - -Gazelle may be run with one of the commands below. If no command is given, -Gazelle defaults to "update". - - update - Gazelle will create new BUILD files or update existing BUILD files - if needed. - fix - in addition to the changes made in update, Gazelle will make potentially - breaking changes. For example, it may delete obsolete rules or rename - existing rules. - update-repos - updates repository rules in the WORKSPACE file. Run with - -h for details. - help - show this message. - -For usage information for a specific command, run the command with the -h flag. -For example: - - gazelle update -h - -Gazelle is under active development, and its interface may change -without notice. - -`) - return flag.ErrHelp -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/langs.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/langs.go deleted file mode 100644 index 0f8e112557..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/langs.go +++ /dev/null @@ -1,27 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "github.com/bazelbuild/bazel-gazelle/language" - "github.com/bazelbuild/bazel-gazelle/language/go" - "github.com/bazelbuild/bazel-gazelle/language/proto" -) - -var languages = []language.Language{ - proto.NewLanguage(), - golang.NewLanguage(), -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/print.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/print.go deleted file mode 100644 index bf99031124..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/print.go +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "os" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func printFile(c *config.Config, f *rule.File) error { - content := f.Format() - _, err := os.Stdout.Write(content) - return err -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/update-repos.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/update-repos.go deleted file mode 100644 index 63b48080b3..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/update-repos.go +++ /dev/null @@ -1,200 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package main - -import ( - "errors" - "flag" - "fmt" - "os" - "path/filepath" - "sync" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/merger" - "github.com/bazelbuild/bazel-gazelle/repo" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -type updateReposFn func(c *updateReposConfig, oldFile *rule.File, kinds map[string]rule.KindInfo) error - -type updateReposConfig struct { - fn updateReposFn - lockFilename string - importPaths []string -} - -const updateReposName = "_update-repos" - -func getUpdateReposConfig(c *config.Config) *updateReposConfig { - return c.Exts[updateReposName].(*updateReposConfig) -} - -type updateReposConfigurer struct{} - -func (_ *updateReposConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) { - uc := &updateReposConfig{} - c.Exts[updateReposName] = uc - fs.StringVar(&uc.lockFilename, "from_file", "", "Gazelle will translate repositories listed in this file into repository rules in WORKSPACE. Currently only dep's Gopkg.lock is supported.") -} - -func (_ *updateReposConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error { - uc := getUpdateReposConfig(c) - switch { - case uc.lockFilename != "": - if len(fs.Args()) != 0 { - return fmt.Errorf("Got %d positional arguments with -from_file; wanted 0.\nTry -help for more information.", len(fs.Args())) - } - uc.fn = importFromLockFile - - default: - if len(fs.Args()) == 0 { - return fmt.Errorf("No repositories specified\nTry -help for more information.") - } - uc.fn = updateImportPaths - uc.importPaths = fs.Args() - } - return nil -} - -func (_ *updateReposConfigurer) KnownDirectives() []string { return nil } - -func (_ *updateReposConfigurer) Configure(c *config.Config, rel string, f *rule.File) {} - -func updateRepos(args []string) error { - cexts := make([]config.Configurer, 0, len(languages)+2) - cexts = append(cexts, &config.CommonConfigurer{}, &updateReposConfigurer{}) - kinds := make(map[string]rule.KindInfo) - loads := []rule.LoadInfo{} - for _, lang := range languages { - cexts = append(cexts, lang) - loads = append(loads, lang.Loads()...) - for kind, info := range lang.Kinds() { - kinds[kind] = info - } - } - c, err := newUpdateReposConfiguration(args, cexts) - if err != nil { - return err - } - uc := getUpdateReposConfig(c) - - workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE") - f, err := rule.LoadFile(workspacePath, "") - if err != nil { - return fmt.Errorf("error loading %q: %v", workspacePath, err) - } - merger.FixWorkspace(f) - - if err := uc.fn(uc, f, kinds); err != nil { - return err - } - merger.FixLoads(f, loads) - if err := merger.CheckGazelleLoaded(f); err != nil { - return err - } - if err := f.Save(f.Path); err != nil { - return fmt.Errorf("error writing %q: %v", f.Path, err) - } - return nil -} - -func newUpdateReposConfiguration(args []string, cexts []config.Configurer) (*config.Config, error) { - c := config.New() - fs := flag.NewFlagSet("gazelle", flag.ContinueOnError) - // Flag will call this on any parse error. Don't print usage unless - // -h or -help were passed explicitly. - fs.Usage = func() {} - for _, cext := range cexts { - cext.RegisterFlags(fs, "update-repos", c) - } - if err := fs.Parse(args); err != nil { - if err == flag.ErrHelp { - updateReposUsage(fs) - return nil, err - } - // flag already prints the error; don't print it again. 
- return nil, errors.New("Try -help for more information") - } - for _, cext := range cexts { - if err := cext.CheckFlags(fs, c); err != nil { - return nil, err - } - } - return c, nil -} - -func updateReposUsage(fs *flag.FlagSet) { - fmt.Fprint(os.Stderr, `usage: - -# Add/update repositories by import path -gazelle update-repos example.com/repo1 example.com/repo2 - -# Import repositories from lock file -gazelle update-repos -from_file=file - -The update-repos command updates repository rules in the WORKSPACE file. -update-repos can add or update repositories explicitly by import path. -update-repos can also import repository rules from a vendoring tool's lock -file (currently only deps' Gopkg.lock is supported). - -FLAGS: - -`) -} - -func updateImportPaths(c *updateReposConfig, f *rule.File, kinds map[string]rule.KindInfo) error { - rs := repo.ListRepositories(f) - rc := repo.NewRemoteCache(rs) - - genRules := make([]*rule.Rule, len(c.importPaths)) - errs := make([]error, len(c.importPaths)) - var wg sync.WaitGroup - wg.Add(len(c.importPaths)) - for i, imp := range c.importPaths { - go func(i int, imp string) { - defer wg.Done() - r, err := repo.UpdateRepo(rc, imp) - if err != nil { - errs[i] = err - return - } - r.Remote = "" // don't set these explicitly - r.VCS = "" - rule := repo.GenerateRule(r) - genRules[i] = rule - }(i, imp) - } - wg.Wait() - - for _, err := range errs { - if err != nil { - return err - } - } - merger.MergeFile(f, nil, genRules, merger.PreResolve, kinds) - return nil -} - -func importFromLockFile(c *updateReposConfig, f *rule.File, kinds map[string]rule.KindInfo) error { - genRules, err := repo.ImportRepoRules(c.lockFilename) - if err != nil { - return err - } - - merger.MergeFile(f, nil, genRules, merger.PreResolve, kinds) - return nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/version.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/version.go deleted file mode 100644 index 0a7da2056a..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/version.go +++ /dev/null @@ -1,65 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package main - -import ( - "io/ioutil" - "log" - "path/filepath" - "regexp" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/internal/version" - "github.com/bazelbuild/bazel-gazelle/repo" -) - -var minimumRulesGoVersion = version.Version{0, 13, 0} - -// checkRulesGoVersion checks whether a compatible version of rules_go is -// being used in the workspace. A message will be logged if an incompatible -// version is found. -// -// Note that we can't always determine the version of rules_go in use. Also, -// if we find an incompatible version, we shouldn't bail out since the -// incompatibility may not matter in the current workspace. 
-func checkRulesGoVersion(repoRoot string) { - const message = `Gazelle may not be compatible with this version of rules_go. -Update io_bazel_rules_go to a newer version in your WORKSPACE file.` - - rulesGoPath, err := repo.FindExternalRepo(repoRoot, config.RulesGoRepoName) - if err != nil { - return - } - defBzlPath := filepath.Join(rulesGoPath, "go", "def.bzl") - defBzlContent, err := ioutil.ReadFile(defBzlPath) - if err != nil { - return - } - versionRe := regexp.MustCompile(`(?m)^RULES_GO_VERSION = ['"]([0-9.]*)['"]`) - match := versionRe.FindSubmatch(defBzlContent) - if match == nil { - log.Printf("RULES_GO_VERSION not found in @%s//go:def.bzl.\n%s", config.RulesGoRepoName, message) - return - } - vstr := string(match[1]) - v, err := version.ParseVersion(vstr) - if err != nil { - log.Printf("RULES_GO_VERSION %q could not be parsed in @%s//go:def.bzl.\n%s", vstr, config.RulesGoRepoName, message) - } - if v.Compare(minimumRulesGoVersion) < 0 { - log.Printf("Found RULES_GO_VERSION %s. Minimum compatible version is %s.\n%s", v, minimumRulesGoVersion, message) - } -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/BUILD.bazel deleted file mode 100644 index 84a97f8f00..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/BUILD.bazel +++ /dev/null @@ -1,16 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "config.go", - "constants.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config", - importpath = "github.com/bazelbuild/bazel-gazelle/config", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/config.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/config.go deleted file mode 100644 index 82feae033c..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/config.go +++ /dev/null @@ -1,210 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package config provides extensible configuration for Gazelle libraries. -// -// Packages may define Configurers which add support for new command-line -// options and directive comments in build files. Note that the -// language.Language interface embeds Configurer, so each language extension -// has the opportunity -// -// When Gazelle walks the directory trees in a repository, it calls the -// Configure method of each Configurer to produce a Config object. -// Config objects are passed as arguments to most functions in Gazelle, so -// this mechanism may be used to control many aspects of Gazelle's behavior. 
-package config - -import ( - "flag" - "fmt" - "path/filepath" - "strings" - - "github.com/bazelbuild/bazel-gazelle/internal/wspace" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// Config holds information about how Gazelle should run. This is based on -// command line arguments, directives, other hints in build files. -// -// A Config applies to a single directory. A Config is created for the -// repository root directory, then copied and modified for each subdirectory. -// -// Config itself contains only general information. Most configuration -// information is language-specific and is stored in Exts. This information -// is modified by extensions that implement Configurer. -type Config struct { - // RepoRoot is the absolute, canonical path to the root directory of the - // repository with all symlinks resolved. - RepoRoot string - - // RepoName is the name of the repository. - RepoName string - - // ReadBuildFilesDir is the absolute path to a directory where - // build files should be read from instead of RepoRoot. - ReadBuildFilesDir string - - // WriteBuildFilesDir is the absolute path to a directory where - // build files should be written to instead of RepoRoot. - WriteBuildFilesDir string - - // ValidBuildFileNames is a list of base names that are considered valid - // build files. Some repositories may have files named "BUILD" that are not - // used by Bazel and should be ignored. Must contain at least one string. - ValidBuildFileNames []string - - // ShouldFix determines whether Gazelle attempts to remove and replace - // usage of deprecated rules. - ShouldFix bool - - // Exts is a set of configurable extensions. Generally, each language - // has its own set of extensions, but other modules may provide their own - // extensions as well. Values in here may be populated by command line - // arguments, directives in build files, or other mechanisms. - Exts map[string]interface{} -} - -func New() *Config { - return &Config{ - ValidBuildFileNames: DefaultValidBuildFileNames, - Exts: make(map[string]interface{}), - } -} - -// Clone creates a copy of the configuration for use in a subdirectory. -// Note that the Exts map is copied, but its contents are not. -// Configurer.Configure should do this, if needed. -func (c *Config) Clone() *Config { - cc := *c - cc.Exts = make(map[string]interface{}) - for k, v := range c.Exts { - cc.Exts[k] = v - } - return &cc -} - -var DefaultValidBuildFileNames = []string{"BUILD.bazel", "BUILD"} - -// IsValidBuildFileName returns true if a file with the given base name -// should be treated as a build file. -func (c *Config) IsValidBuildFileName(name string) bool { - for _, n := range c.ValidBuildFileNames { - if name == n { - return true - } - } - return false -} - -// DefaultBuildFileName returns the base name used to create new build files. -func (c *Config) DefaultBuildFileName() string { - return c.ValidBuildFileNames[0] -} - -// Configurer is the interface for language or library-specific configuration -// extensions. Most (ideally all) modifications to Config should happen -// via this interface. -type Configurer interface { - // RegisterFlags registers command-line flags used by the extension. This - // method is called once with the root configuration when Gazelle - // starts. RegisterFlags may set an initial values in Config.Exts. When flags - // are set, they should modify these values. - RegisterFlags(fs *flag.FlagSet, cmd string, c *Config) - - // CheckFlags validates the configuration after command line flags are parsed. 
- // This is called once with the root configuration when Gazelle starts. - // CheckFlags may set default values in flags or make implied changes. - CheckFlags(fs *flag.FlagSet, c *Config) error - - // KnownDirectives returns a list of directive keys that this Configurer can - // interpret. Gazelle prints errors for directives that are not recoginized by - // any Configurer. - KnownDirectives() []string - - // Configure modifies the configuration using directives and other information - // extracted from a build file. Configure is called in each directory. - // - // c is the configuration for the current directory. It starts out as a copy - // of the configuration for the parent directory. - // - // rel is the slash-separated relative path from the repository root to - // the current directory. It is "" for the root directory itself. - // - // f is the build file for the current directory or nil if there is no - // existing build file. - Configure(c *Config, rel string, f *rule.File) -} - -// CommonConfigurer handles language-agnostic command-line flags and directives, -// i.e., those that apply to Config itself and not to Config.Exts. -type CommonConfigurer struct { - repoRoot, buildFileNames, readBuildFilesDir, writeBuildFilesDir string -} - -func (cc *CommonConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *Config) { - fs.StringVar(&cc.repoRoot, "repo_root", "", "path to a directory which corresponds to go_prefix, otherwise gazelle searches for it.") - fs.StringVar(&cc.buildFileNames, "build_file_name", strings.Join(DefaultValidBuildFileNames, ","), "comma-separated list of valid build file names.\nThe first element of the list is the name of output build files to generate.") - fs.StringVar(&cc.readBuildFilesDir, "experimental_read_build_files_dir", "", "path to a directory where build files should be read from (instead of -repo_root)") - fs.StringVar(&cc.writeBuildFilesDir, "experimental_write_build_files_dir", "", "path to a directory where build files should be written to (instead of -repo_root)") -} - -func (cc *CommonConfigurer) CheckFlags(fs *flag.FlagSet, c *Config) error { - var err error - if cc.repoRoot == "" { - cc.repoRoot, err = wspace.Find(".") - if err != nil { - return fmt.Errorf("-repo_root not specified, and WORKSPACE cannot be found: %v", err) - } - } - c.RepoRoot, err = filepath.Abs(cc.repoRoot) - if err != nil { - return fmt.Errorf("%s: failed to find absolute path of repo root: %v", cc.repoRoot, err) - } - c.RepoRoot, err = filepath.EvalSymlinks(c.RepoRoot) - if err != nil { - return fmt.Errorf("%s: failed to resolve symlinks: %v", cc.repoRoot, err) - } - c.ValidBuildFileNames = strings.Split(cc.buildFileNames, ",") - if cc.readBuildFilesDir != "" { - c.ReadBuildFilesDir, err = filepath.Abs(cc.readBuildFilesDir) - if err != nil { - return fmt.Errorf("%s: failed to find absolute path of -read_build_files_dir: %v", cc.readBuildFilesDir, err) - } - } - if cc.writeBuildFilesDir != "" { - c.WriteBuildFilesDir, err = filepath.Abs(cc.writeBuildFilesDir) - if err != nil { - return fmt.Errorf("%s: failed to find absolute path of -write_build_files_dir: %v", cc.writeBuildFilesDir, err) - } - } - - return nil -} - -func (cc *CommonConfigurer) KnownDirectives() []string { - return []string{"build_file_name"} -} - -func (cc *CommonConfigurer) Configure(c *Config, rel string, f *rule.File) { - if f == nil { - return - } - for _, d := range f.Directives { - if d.Key == "build_file_name" { - c.ValidBuildFileNames = strings.Split(d.Value, ",") - } - } -} diff --git 
a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/constants.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/constants.go deleted file mode 100644 index 7bbddf8228..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/config/constants.go +++ /dev/null @@ -1,27 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package config - -const ( - // RulesGoRepoName is the canonical name of the rules_go repository. It must - // match the workspace name in WORKSPACE. - // TODO(jayconrod): move to language/go. - RulesGoRepoName = "io_bazel_rules_go" - - // GazelleImportsKey is an internal attribute that lists imported packages - // on generated rules. It is replaced with "deps" during import resolution. - GazelleImportsKey = "_gazelle_imports" -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/BUILD.bazel deleted file mode 100644 index 2e05840f49..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/BUILD.bazel +++ /dev/null @@ -1,9 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["flag.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag", - importpath = "github.com/bazelbuild/bazel-gazelle/flag", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/flag.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/flag.go deleted file mode 100644 index 7e487fcdd7..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/flag/flag.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2017 The Bazel Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package flag provides some general-purpose types which satisfy the -// flag.Value interface. -package flag - -import ( - stdflag "flag" - "strings" -) - -// MultiFlag collects repeated string flags into a slice. 
-type MultiFlag struct { - Values *[]string -} - -var _ stdflag.Value = (*MultiFlag)(nil) - -func (m *MultiFlag) Set(v string) error { - *m.Values = append(*m.Values, v) - return nil -} - -func (m *MultiFlag) String() string { - if m == nil || m.Values == nil { - return "" - } - return strings.Join(*m.Values, ",") -} - -// ExplicitFlag is a string flag that tracks whether it was set. -type ExplicitFlag struct { - IsSet *bool - Value *string -} - -var _ stdflag.Value = (*ExplicitFlag)(nil) - -func (f *ExplicitFlag) Set(value string) error { - *f.IsSet = true - *f.Value = value - return nil -} - -func (f *ExplicitFlag) String() string { - if f == nil || f.Value == nil { - return "" - } - return *f.Value -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/BUILD.bazel deleted file mode 100644 index 0de5603d7f..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/BUILD.bazel +++ /dev/null @@ -1,9 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["version.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version", - importpath = "github.com/bazelbuild/bazel-gazelle/internal/version", - visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/version.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/version.go deleted file mode 100644 index 5c56fca72e..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/version/version.go +++ /dev/null @@ -1,72 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package version - -import ( - "fmt" - "strconv" - "strings" -) - -// Version is a tuple of non-negative integers that represents the version of -// a software package. -type Version []int - -func (v Version) String() string { - cstrs := make([]string, len(v)) - for i, cn := range v { - cstrs[i] = strconv.Itoa(cn) - } - return strings.Join(cstrs, ".") -} - -// Compare returns an integer comparing two versions lexicographically. -func (x Version) Compare(y Version) int { - n := len(x) - if len(y) < n { - n = len(y) - } - for i := 0; i < n; i++ { - cmp := x[i] - y[i] - if cmp != 0 { - return cmp - } - } - return len(x) - len(y) -} - -// ParseVersion parses a version of the form "12.34.56-abcd". Non-negative -// integer components are separated by dots. An arbitrary suffix may appear -// after '-', which is ignored. 
-func ParseVersion(vs string) (Version, error) { - i := strings.IndexByte(vs, '-') - if i >= 0 { - vs = vs[:i] - } - cstrs := strings.Split(vs, ".") - v := make(Version, len(cstrs)) - for i, cstr := range cstrs { - cn, err := strconv.Atoi(cstr) - if err != nil { - return nil, fmt.Errorf("could not parse version string: %q is not an integer", cstr) - } - if cn < 0 { - return nil, fmt.Errorf("could not parse version string: %q is negative", cstr) - } - v[i] = cn - } - return v, nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/BUILD.bazel deleted file mode 100644 index 28b2ee33bc..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/BUILD.bazel +++ /dev/null @@ -1,9 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["finder.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace", - importpath = "github.com/bazelbuild/bazel-gazelle/internal/wspace", - visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/finder.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/finder.go deleted file mode 100644 index 78935eb5d5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace/finder.go +++ /dev/null @@ -1,45 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package wspace provides functions to locate and modify a bazel WORKSPACE file. -package wspace - -import ( - "os" - "path/filepath" - "strings" -) - -const workspaceFile = "WORKSPACE" - -// Find searches from the given dir and up for the WORKSPACE file -// returning the directory containing it, or an error if none found in the tree. 
-func Find(dir string) (string, error) { - dir, err := filepath.Abs(dir) - if err != nil { - return "", err - } - - for { - _, err = os.Stat(filepath.Join(dir, workspaceFile)) - if err == nil { - return dir, nil - } - if !os.IsNotExist(err) { - return "", err - } - if strings.HasSuffix(dir, string(os.PathSeparator)) { // stop at root dir - return "", os.ErrNotExist - } - dir = filepath.Dir(dir) - } -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/BUILD.bazel deleted file mode 100644 index 330da28076..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/BUILD.bazel +++ /dev/null @@ -1,10 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["label.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label", - importpath = "github.com/bazelbuild/bazel-gazelle/label", - visibility = ["//visibility:public"], - deps = ["//vendor/github.com/bazelbuild/bazel-gazelle/pathtools:go_default_library"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/label.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/label.go deleted file mode 100644 index 0d8e6c0cb5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/label/label.go +++ /dev/null @@ -1,201 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package label provides utilities for parsing and manipulating -// Bazel labels. See -// https://docs.bazel.build/versions/master/build-ref.html#labels -// for more information. -package label - -import ( - "fmt" - "log" - "path" - "regexp" - "strings" - - "github.com/bazelbuild/bazel-gazelle/pathtools" -) - -// A Label represents a label of a build target in Bazel. Labels have three -// parts: a repository name, a package name, and a target name, formatted -// as @repo//pkg:target. -type Label struct { - // Repo is the repository name. If omitted, the label refers to a target - // in the current repository. - Repo string - - // Pkg is the package name, which is usually the directory that contains - // the target. If both Repo and Pkg are omitted, the label is relative. - Pkg string - - // Name is the name of the target the label refers to. If omitted, Name - // is assumed to be the same as Pkg. - Name string - - // Relative indicates whether the label refers to a target in the current - // package. Relative is true if and only if Repo and Pkg are both omitted. - Relative bool -} - -// New constructs a new label from components. -func New(repo, pkg, name string) Label { - return Label{Repo: repo, Pkg: pkg, Name: name} -} - -// NoLabel is the zero value of Label. It is not a valid label and may be -// returned when an error occurs. 
-var NoLabel = Label{} - -var ( - labelRepoRegexp = regexp.MustCompile(`^[A-Za-z][A-Za-z0-9_]*$`) - labelPkgRegexp = regexp.MustCompile(`^[A-Za-z0-9/._-]*$`) - labelNameRegexp = regexp.MustCompile(`^[A-Za-z0-9_/.+=,@~-]*$`) -) - -// Parse reads a label from a string. -// See https://docs.bazel.build/versions/master/build-ref.html#lexi. -func Parse(s string) (Label, error) { - origStr := s - - relative := true - var repo string - if strings.HasPrefix(s, "@") { - relative = false - endRepo := strings.Index(s, "//") - if endRepo < 0 { - return NoLabel, fmt.Errorf("label parse error: repository does not end with '//': %q", origStr) - } - repo = s[len("@"):endRepo] - if !labelRepoRegexp.MatchString(repo) { - return NoLabel, fmt.Errorf("label parse error: repository has invalid characters: %q", origStr) - } - s = s[endRepo:] - } - - var pkg string - if strings.HasPrefix(s, "//") { - relative = false - endPkg := strings.Index(s, ":") - if endPkg < 0 { - pkg = s[len("//"):] - s = "" - } else { - pkg = s[len("//"):endPkg] - s = s[endPkg:] - } - if !labelPkgRegexp.MatchString(pkg) { - return NoLabel, fmt.Errorf("label parse error: package has invalid characters: %q", origStr) - } - } - - if s == ":" { - return NoLabel, fmt.Errorf("label parse error: empty name: %q", origStr) - } - name := strings.TrimPrefix(s, ":") - if !labelNameRegexp.MatchString(name) { - return NoLabel, fmt.Errorf("label parse error: name has invalid characters: %q", origStr) - } - - if pkg == "" && name == "" { - return NoLabel, fmt.Errorf("label parse error: empty package and name: %q", origStr) - } - if name == "" { - name = path.Base(pkg) - } - - return Label{ - Repo: repo, - Pkg: pkg, - Name: name, - Relative: relative, - }, nil -} - -func (l Label) String() string { - if l.Relative { - return fmt.Sprintf(":%s", l.Name) - } - - var repo string - if l.Repo != "" { - repo = fmt.Sprintf("@%s", l.Repo) - } - - if path.Base(l.Pkg) == l.Name { - return fmt.Sprintf("%s//%s", repo, l.Pkg) - } - return fmt.Sprintf("%s//%s:%s", repo, l.Pkg, l.Name) -} - -// Abs computes an absolute label (one with a repository and package name) -// from this label. If this label is already absolute, it is returned -// unchanged. -func (l Label) Abs(repo, pkg string) Label { - if !l.Relative { - return l - } - return Label{Repo: repo, Pkg: pkg, Name: l.Name} -} - -// Rel attempts to compute a relative label from this label. If this label -// is already relative or is in a different package, this label may be -// returned unchanged. -func (l Label) Rel(repo, pkg string) Label { - if l.Relative || l.Repo != repo { - return l - } - if l.Pkg == pkg { - return Label{Name: l.Name, Relative: true} - } - return Label{Pkg: l.Pkg, Name: l.Name} -} - -// Equal returns whether two labels are exactly the same. It does not return -// true for different labels that refer to the same target. -func (l Label) Equal(other Label) bool { - return l.Repo == other.Repo && - l.Pkg == other.Pkg && - l.Name == other.Name && - l.Relative == other.Relative -} - -// Contains returns whether other is contained by the package of l or a -// sub-package. Neither label may be relative. 
-func (l Label) Contains(other Label) bool { - if l.Relative { - log.Panicf("l must not be relative: %s", l) - } - if other.Relative { - log.Panicf("other must not be relative: %s", other) - } - result := l.Repo == other.Repo && pathtools.HasPrefix(other.Pkg, l.Pkg) - return result -} - -// ImportPathToBazelRepoName converts a Go import path into a bazel repo name -// following the guidelines in http://bazel.io/docs/be/functions.html#workspace -func ImportPathToBazelRepoName(importpath string) string { - importpath = strings.ToLower(importpath) - components := strings.Split(importpath, "/") - labels := strings.Split(components[0], ".") - var reversed []string - for i := range labels { - l := labels[len(labels)-i-1] - reversed = append(reversed, l) - } - repo := strings.Join(append(reversed, components[1:]...), "_") - return strings.NewReplacer("-", "_", ".", "_").Replace(repo) -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/BUILD.bazel deleted file mode 100644 index 9e021618ef..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/BUILD.bazel +++ /dev/null @@ -1,14 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["lang.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language", - importpath = "github.com/bazelbuild/bazel-gazelle/language", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/config:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/resolve:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/BUILD.bazel deleted file mode 100644 index b034f62e84..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/BUILD.bazel +++ /dev/null @@ -1,34 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "config.go", - "constants.go", - "fileinfo.go", - "fix.go", - "generate.go", - "kinds.go", - "known_go_imports.go", - "known_proto_imports.go", - "lang.go", - "package.go", - "resolve.go", - "std_package_list.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go", - importpath = "github.com/bazelbuild/bazel-gazelle/language/go", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/config:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/flag:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/language:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/language/proto:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/pathtools:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/repo:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/resolve:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - "//vendor/github.com/bazelbuild/buildtools/build:go_default_library", - ], -) diff --git 
a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/config.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/config.go deleted file mode 100644 index f40bb07d63..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/config.go +++ /dev/null @@ -1,285 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package golang - -import ( - "flag" - "fmt" - "go/build" - "log" - "path" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - gzflag "github.com/bazelbuild/bazel-gazelle/flag" - "github.com/bazelbuild/bazel-gazelle/language/proto" - "github.com/bazelbuild/bazel-gazelle/rule" - bzl "github.com/bazelbuild/buildtools/build" -) - -// goConfig contains configuration values related to Go rules. -type goConfig struct { - // genericTags is a set of tags that Gazelle considers to be true. Set with - // -build_tags or # gazelle:build_tags. Some tags, like gc, are always on. - genericTags map[string]bool - - // prefix is a prefix of an import path, used to generate importpath - // attributes. Set with -go_prefix or # gazelle:prefix. - prefix string - - // prefixRel is the package name of the directory where the prefix was set - // ("" for the root directory). - prefixRel string - - // prefixSet indicates whether the prefix was set explicitly. It is an error - // to infer an importpath for a rule without setting the prefix. - prefixSet bool - - // importMapPrefix is a prefix of a package path, used to generate importmap - // attributes. Set with # gazelle:importmap_prefix. - importMapPrefix string - - // importMapPrefixRel is the package name of the directory where importMapPrefix - // was set ("" for the root directory). - importMapPrefixRel string - - // depMode determines how imports that are not standard, indexed, or local - // (under the current prefix) should be resolved. - depMode dependencyMode -} - -func newGoConfig() *goConfig { - gc := &goConfig{} - gc.preprocessTags() - return gc -} - -func getGoConfig(c *config.Config) *goConfig { - return c.Exts[goName].(*goConfig) -} - -func (gc *goConfig) clone() *goConfig { - gcCopy := *gc - gcCopy.genericTags = make(map[string]bool) - for k, v := range gc.genericTags { - gcCopy.genericTags[k] = v - } - return &gcCopy -} - -// preprocessTags adds some tags which are on by default before they are -// used to match files. -func (gc *goConfig) preprocessTags() { - if gc.genericTags == nil { - gc.genericTags = make(map[string]bool) - } - gc.genericTags["gc"] = true -} - -// setBuildTags sets genericTags by parsing as a comma separated list. An -// error will be returned for tags that wouldn't be recognized by "go build". -// preprocessTags should be called before this. 
-func (gc *goConfig) setBuildTags(tags string) error { - if tags == "" { - return nil - } - for _, t := range strings.Split(tags, ",") { - if strings.HasPrefix(t, "!") { - return fmt.Errorf("build tags can't be negated: %s", t) - } - gc.genericTags[t] = true - } - return nil -} - -func getProtoMode(c *config.Config) proto.Mode { - if pc := proto.GetProtoConfig(c); pc != nil { - return pc.Mode - } else { - return proto.DisableGlobalMode - } -} - -// dependencyMode determines how imports of packages outside of the prefix -// are resolved. -type dependencyMode int - -const ( - // externalMode indicates imports should be resolved to external dependencies - // (declared in WORKSPACE). - externalMode dependencyMode = iota - - // vendorMode indicates imports should be resolved to libraries in the - // vendor directory. - vendorMode -) - -func (m dependencyMode) String() string { - if m == externalMode { - return "external" - } else { - return "vendored" - } -} - -type externalFlag struct { - depMode *dependencyMode -} - -func (f *externalFlag) Set(value string) error { - switch value { - case "external": - *f.depMode = externalMode - case "vendored": - *f.depMode = vendorMode - default: - return fmt.Errorf("unrecognized dependency mode: %q", value) - } - return nil -} - -func (f *externalFlag) String() string { - if f == nil || f.depMode == nil { - return "external" - } - return f.depMode.String() -} - -type tagsFlag func(string) error - -func (f tagsFlag) Set(value string) error { - return f(value) -} - -func (f tagsFlag) String() string { - return "" -} - -func (_ *goLang) KnownDirectives() []string { - return []string{ - "build_tags", - "importmap_prefix", - "prefix", - } -} - -func (_ *goLang) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) { - gc := newGoConfig() - switch cmd { - case "fix", "update": - fs.Var( - tagsFlag(gc.setBuildTags), - "build_tags", - "comma-separated list of build tags. If not specified, Gazelle will not\n\tfilter sources with build constraints.") - fs.Var( - &gzflag.ExplicitFlag{Value: &gc.prefix, IsSet: &gc.prefixSet}, - "go_prefix", - "prefix of import paths in the current workspace") - fs.Var( - &externalFlag{&gc.depMode}, - "external", - "external: resolve external packages with go_repository\n\tvendored: resolve external packages as packages in vendor/") - } - c.Exts[goName] = gc -} - -func (_ *goLang) CheckFlags(fs *flag.FlagSet, c *config.Config) error { - // The base of the -go_prefix flag may be used to generate proto_library - // rule names when there are no .proto sources (empty rules to be deleted) - // or when the package name can't be determined. - // TODO(jayconrod): deprecate and remove this behavior. 
- gc := getGoConfig(c) - if pc := proto.GetProtoConfig(c); pc != nil { - pc.GoPrefix = gc.prefix - } - return nil -} - -func (_ *goLang) Configure(c *config.Config, rel string, f *rule.File) { - var gc *goConfig - if raw, ok := c.Exts[goName]; !ok { - gc = newGoConfig() - } else { - gc = raw.(*goConfig).clone() - } - c.Exts[goName] = gc - - if path.Base(rel) == "vendor" { - gc.importMapPrefix = inferImportPath(gc, rel) - gc.importMapPrefixRel = rel - gc.prefix = "" - gc.prefixRel = rel - } - - if f != nil { - setPrefix := func(prefix string) { - if err := checkPrefix(prefix); err != nil { - log.Print(err) - return - } - gc.prefix = prefix - gc.prefixSet = true - gc.prefixRel = rel - } - for _, d := range f.Directives { - switch d.Key { - case "build_tags": - if err := gc.setBuildTags(d.Value); err != nil { - log.Print(err) - continue - } - gc.preprocessTags() - gc.setBuildTags(d.Value) - case "importmap_prefix": - gc.importMapPrefix = d.Value - gc.importMapPrefixRel = rel - case "prefix": - setPrefix(d.Value) - } - } - if !gc.prefixSet { - for _, r := range f.Rules { - switch r.Kind() { - case "go_prefix": - args := r.Args() - if len(args) != 1 { - continue - } - s, ok := args[0].(*bzl.StringExpr) - if !ok { - continue - } - setPrefix(s.Value) - - case "gazelle": - if prefix := r.AttrString("prefix"); prefix != "" { - setPrefix(prefix) - } - } - } - } - } -} - -// checkPrefix checks that a string may be used as a prefix. We forbid local -// (relative) imports and those beginning with "/". We allow the empty string, -// but generated rules must not have an empty importpath. -func checkPrefix(prefix string) error { - if strings.HasPrefix(prefix, "/") || build.IsLocalImport(prefix) { - return fmt.Errorf("invalid prefix: %q", prefix) - } - return nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/constants.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/constants.go deleted file mode 100644 index 14a343c260..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/constants.go +++ /dev/null @@ -1,44 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package golang - -const ( - // defaultLibName is the name of the default go_library rule in a Go - // package directory. This name was originally chosen so that rules_go - // could translate between Bazel labels and Go import paths using go_prefix. - // It is no longer needed since go_prefix was deleted. - defaultLibName = "go_default_library" - - // defaultTestName is a name of an internal test corresponding to - // defaultLibName. It does not need to be consistent to something but it - // just needs to be unique in the Bazel package - defaultTestName = "go_default_test" - - // legacyProtoFilegroupName is the anme of a filegroup created in legacy - // mode for libraries that contained .pb.go files and .proto files. 
- legacyProtoFilegroupName = "go_default_library_protos" - - // grpcCompilerLabel is the label for the gRPC compiler plugin, used in the - // "compilers" attribute of go_proto_library rules. - grpcCompilerLabel = "@io_bazel_rules_go//proto:go_grpc" - - // wellKnownTypesGoPrefix is the import path for the Go repository containing - // pre-generated code for the Well Known Types. - wellKnownTypesGoPrefix = "github.com/golang/protobuf" - - // wellKnownTypesPkg is the package name for the predefined WKTs in rules_go. - wellKnownTypesPkg = "proto/wkt" -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fileinfo.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fileinfo.go deleted file mode 100644 index 8aa106e4b5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fileinfo.go +++ /dev/null @@ -1,681 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package golang - -import ( - "bufio" - "bytes" - "errors" - "fmt" - "go/ast" - "go/parser" - "go/token" - "log" - "os" - "path" - "path/filepath" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/language/proto" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// fileInfo holds information used to decide how to build a file. This -// information comes from the file's name, from package and import declarations -// (in .go files), and from +build and cgo comments. -type fileInfo struct { - path string - name string - - // ext is the type of file, based on extension. - ext ext - - // packageName is the Go package name of a .go file, without the - // "_test" suffix if it was present. It is empty for non-Go files. - packageName string - - // importPath is the canonical import path for this file's package. - // This may be read from a package comment (in Go) or a go_package - // option (in proto). This field is empty for files that don't specify - // an import path. - importPath string - - // isTest is true if the file stem (the part before the extension) - // ends with "_test.go". This is never true for non-Go files. - isTest bool - - // imports is a list of packages imported by a file. It does not include - // "C" or anything from the standard library. - imports []string - - // isCgo is true for .go files that import "C". - isCgo bool - - // goos and goarch contain the OS and architecture suffixes in the filename, - // if they were present. - goos, goarch string - - // tags is a list of build tag lines. Each entry is the trimmed text of - // a line after a "+build" prefix. - tags []tagLine - - // copts and clinkopts contain flags that are part of CFLAGS, CPPFLAGS, - // CXXFLAGS, and LDFLAGS directives in cgo comments. - copts, clinkopts []taggedOpts - - // hasServices indicates whether a .proto file has service definitions. 
- hasServices bool -} - -// tagLine represents the space-separated disjunction of build tag groups -// in a line comment. -type tagLine []tagGroup - -// check returns true if at least one of the tag groups is satisfied. -func (l tagLine) check(c *config.Config, os, arch string) bool { - if len(l) == 0 { - return false - } - for _, g := range l { - if g.check(c, os, arch) { - return true - } - } - return false -} - -// tagGroup represents a comma-separated conjuction of build tags. -type tagGroup []string - -// check returns true if all of the tags are true. Tags that start with -// "!" are negated (but "!!") is not allowed. Go release tags (e.g., "go1.8") -// are ignored. If the group contains an os or arch tag, but the os or arch -// parameters are empty, check returns false even if the tag is negated. -func (g tagGroup) check(c *config.Config, os, arch string) bool { - goConf := getGoConfig(c) - for _, t := range g { - if strings.HasPrefix(t, "!!") { // bad syntax, reject always - return false - } - not := strings.HasPrefix(t, "!") - if not { - t = t[1:] - } - if isIgnoredTag(t) { - // Release tags are treated as "unknown" and are considered true, - // whether or not they are negated. - continue - } - var match bool - if _, ok := rule.KnownOSSet[t]; ok { - if os == "" { - return false - } - match = os == t - } else if _, ok := rule.KnownArchSet[t]; ok { - if arch == "" { - return false - } - match = arch == t - } else { - match = goConf.genericTags[t] - } - if not { - match = !match - } - if !match { - return false - } - } - return true -} - -// taggedOpts a list of compile or link options which should only be applied -// if the given set of build tags are satisfied. These options have already -// been tokenized using the same algorithm that "go build" uses, then joined -// with OptSeparator. -type taggedOpts struct { - tags tagLine - opts string -} - -// optSeparator is a special character inserted between options that appeared -// together in a #cgo directive. This allows options to be split, modified, -// and escaped by other packages. -// -// It's important to keep options grouped together in the same string. For -// example, if we have "-framework IOKit" together in a #cgo directive, -// "-framework" shouldn't be treated as a separate string for the purposes of -// sorting and de-duplicating. -const optSeparator = "\x1D" - -// ext indicates how a file should be treated, based on extension. -type ext int - -const ( - // unknownExt is applied files that aren't buildable with Go. - unknownExt ext = iota - - // goExt is applied to .go files. - goExt - - // cExt is applied to C and C++ files. - cExt - - // hExt is applied to header files. If cgo code is present, these may be - // C or C++ headers. If not, they are treated as Go assembly headers. - hExt - - // sExt is applied to Go assembly files, ending with .s. - sExt - - // csExt is applied to other assembly files, ending with .S. These are built - // with the C compiler if cgo code is present. - csExt - - // protoExt is applied to .proto files. - protoExt -) - -// fileNameInfo returns information that can be inferred from the name of -// a file. It does not read data from the file. 
-func fileNameInfo(path_ string) fileInfo { - name := filepath.Base(path_) - var ext ext - switch path.Ext(name) { - case ".go": - ext = goExt - case ".c", ".cc", ".cpp", ".cxx", ".m", ".mm": - ext = cExt - case ".h", ".hh", ".hpp", ".hxx": - ext = hExt - case ".s": - ext = sExt - case ".S": - ext = csExt - case ".proto": - ext = protoExt - default: - ext = unknownExt - } - if strings.HasPrefix(name, ".") || strings.HasPrefix(name, "_") { - ext = unknownExt - } - - // Determine test, goos, and goarch. This is intended to match the logic - // in goodOSArchFile in go/build. - var isTest bool - var goos, goarch string - l := strings.Split(name[:len(name)-len(path.Ext(name))], "_") - if len(l) >= 2 && l[len(l)-1] == "test" { - isTest = ext == goExt - l = l[:len(l)-1] - } - switch { - case len(l) >= 3 && rule.KnownOSSet[l[len(l)-2]] && rule.KnownArchSet[l[len(l)-1]]: - goos = l[len(l)-2] - goarch = l[len(l)-1] - case len(l) >= 2 && rule.KnownOSSet[l[len(l)-1]]: - goos = l[len(l)-1] - case len(l) >= 2 && rule.KnownArchSet[l[len(l)-1]]: - goarch = l[len(l)-1] - } - - return fileInfo{ - path: path_, - name: name, - ext: ext, - isTest: isTest, - goos: goos, - goarch: goarch, - } -} - -// otherFileInfo returns information about a non-.go file. It will parse -// part of the file to determine build tags. If the file can't be read, an -// error will be logged, and partial information will be returned. -func otherFileInfo(path string) fileInfo { - info := fileNameInfo(path) - if info.ext == unknownExt { - return info - } - - tags, err := readTags(info.path) - if err != nil { - log.Printf("%s: error reading file: %v", info.path, err) - return info - } - info.tags = tags - return info -} - -// goFileInfo returns information about a .go file. It will parse part of the -// file to determine the package name, imports, and build constraints. -// If the file can't be read, an error will be logged, and partial information -// will be returned. -// This function is intended to match go/build.Context.Import. 
-// TODD(#53): extract canonical import path -func goFileInfo(path, rel string) fileInfo { - info := fileNameInfo(path) - fset := token.NewFileSet() - pf, err := parser.ParseFile(fset, info.path, nil, parser.ImportsOnly|parser.ParseComments) - if err != nil { - log.Printf("%s: error reading go file: %v", info.path, err) - return info - } - - info.packageName = pf.Name.Name - if info.isTest && strings.HasSuffix(info.packageName, "_test") { - info.packageName = info.packageName[:len(info.packageName)-len("_test")] - } - - for _, decl := range pf.Decls { - d, ok := decl.(*ast.GenDecl) - if !ok { - continue - } - for _, dspec := range d.Specs { - spec, ok := dspec.(*ast.ImportSpec) - if !ok { - continue - } - quoted := spec.Path.Value - path, err := strconv.Unquote(quoted) - if err != nil { - log.Printf("%s: error reading go file: %v", info.path, err) - continue - } - - if path == "C" { - if info.isTest { - log.Printf("%s: warning: use of cgo in test not supported", info.path) - } - info.isCgo = true - cg := spec.Doc - if cg == nil && len(d.Specs) == 1 { - cg = d.Doc - } - if cg != nil { - if err := saveCgo(&info, rel, cg); err != nil { - log.Printf("%s: error reading go file: %v", info.path, err) - } - } - continue - } - info.imports = append(info.imports, path) - } - } - - tags, err := readTags(info.path) - if err != nil { - log.Printf("%s: error reading go file: %v", info.path, err) - return info - } - info.tags = tags - - return info -} - -// saveCgo extracts CFLAGS, CPPFLAGS, CXXFLAGS, and LDFLAGS directives -// from a comment above a "C" import. This is intended to match logic in -// go/build.Context.saveCgo. -func saveCgo(info *fileInfo, rel string, cg *ast.CommentGroup) error { - text := cg.Text() - for _, line := range strings.Split(text, "\n") { - orig := line - - // Line is - // #cgo [GOOS/GOARCH...] LDFLAGS: stuff - // - line = strings.TrimSpace(line) - if len(line) < 5 || line[:4] != "#cgo" || (line[4] != ' ' && line[4] != '\t') { - continue - } - - // Split at colon. - line = strings.TrimSpace(line[4:]) - i := strings.Index(line, ":") - if i < 0 { - return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig) - } - line, optstr := strings.TrimSpace(line[:i]), strings.TrimSpace(line[i+1:]) - - // Parse tags and verb. - f := strings.Fields(line) - if len(f) < 1 { - return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig) - } - verb := f[len(f)-1] - tags := parseTagsInGroups(f[:len(f)-1]) - - // Parse options. - opts, err := splitQuoted(optstr) - if err != nil { - return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig) - } - var ok bool - for i, opt := range opts { - if opt, ok = expandSrcDir(opt, rel); !ok { - return fmt.Errorf("%s: malformed #cgo argument: %s", info.path, orig) - } - opts[i] = opt - } - joinedStr := strings.Join(opts, optSeparator) - - // Add tags to appropriate list. - switch verb { - case "CFLAGS", "CPPFLAGS", "CXXFLAGS": - info.copts = append(info.copts, taggedOpts{tags, joinedStr}) - case "LDFLAGS": - info.clinkopts = append(info.clinkopts, taggedOpts{tags, joinedStr}) - case "pkg-config": - return fmt.Errorf("%s: pkg-config not supported: %s", info.path, orig) - default: - return fmt.Errorf("%s: invalid #cgo verb: %s", info.path, orig) - } - } - return nil -} - -// splitQuoted splits the string s around each instance of one or more consecutive -// white space characters while taking into account quotes and escaping, and -// returns an array of substrings of s or an empty list if s contains only white space. 
-// Single quotes and double quotes are recognized to prevent splitting within the -// quoted region, and are removed from the resulting substrings. If a quote in s -// isn't closed err will be set and r will have the unclosed argument as the -// last element. The backslash is used for escaping. -// -// For example, the following string: -// -// a b:"c d" 'e''f' "g\"" -// -// Would be parsed as: -// -// []string{"a", "b:c d", "ef", `g"`} -// -// Copied from go/build.splitQuoted -func splitQuoted(s string) (r []string, err error) { - var args []string - arg := make([]rune, len(s)) - escaped := false - quoted := false - quote := '\x00' - i := 0 - for _, rune := range s { - switch { - case escaped: - escaped = false - case rune == '\\': - escaped = true - continue - case quote != '\x00': - if rune == quote { - quote = '\x00' - continue - } - case rune == '"' || rune == '\'': - quoted = true - quote = rune - continue - case unicode.IsSpace(rune): - if quoted || i > 0 { - quoted = false - args = append(args, string(arg[:i])) - i = 0 - } - continue - } - arg[i] = rune - i++ - } - if quoted || i > 0 { - args = append(args, string(arg[:i])) - } - if quote != 0 { - err = errors.New("unclosed quote") - } else if escaped { - err = errors.New("unfinished escaping") - } - return args, err -} - -// expandSrcDir expands any occurrence of ${SRCDIR}, making sure -// the result is safe for the shell. -// -// Copied from go/build.expandSrcDir -func expandSrcDir(str string, srcdir string) (string, bool) { - // "\" delimited paths cause safeCgoName to fail - // so convert native paths with a different delimiter - // to "/" before starting (eg: on windows). - srcdir = filepath.ToSlash(srcdir) - if srcdir == "" { - srcdir = "." - } - - // Spaces are tolerated in ${SRCDIR}, but not anywhere else. - chunks := strings.Split(str, "${SRCDIR}") - if len(chunks) < 2 { - return str, safeCgoName(str, false) - } - ok := true - for _, chunk := range chunks { - ok = ok && (chunk == "" || safeCgoName(chunk, false)) - } - ok = ok && (srcdir == "" || safeCgoName(srcdir, true)) - res := strings.Join(chunks, srcdir) - return res, ok && res != "" -} - -// NOTE: $ is not safe for the shell, but it is allowed here because of linker options like -Wl,$ORIGIN. -// We never pass these arguments to a shell (just to programs we construct argv for), so this should be okay. -// See golang.org/issue/6038. -// The @ is for OS X. See golang.org/issue/13720. -// The % is for Jenkins. See golang.org/issue/16959. -const safeString = "+-.,/0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz:$@%" -const safeSpaces = " " - -var safeBytes = []byte(safeSpaces + safeString) - -// Copied from go/build.safeCgoName -func safeCgoName(s string, spaces bool) bool { - if s == "" { - return false - } - safe := safeBytes - if !spaces { - safe = safe[len(safeSpaces):] - } - for i := 0; i < len(s); i++ { - if c := s[i]; c < utf8.RuneSelf && bytes.IndexByte(safe, c) < 0 { - return false - } - } - return true -} - -// readTags reads and extracts build tags from the block of comments -// and blank lines at the start of a file which is separated from the -// rest of the file by a blank line. Each string in the returned slice -// is the trimmed text of a line after a "+build" prefix. -// Based on go/build.Context.shouldBuild. 
-func readTags(path string) ([]tagLine, error) { - f, err := os.Open(path) - if err != nil { - return nil, err - } - defer f.Close() - scanner := bufio.NewScanner(f) - - // Pass 1: Identify leading run of // comments and blank lines, - // which must be followed by a blank line. - var lines []string - end := 0 - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if line == "" { - end = len(lines) - continue - } - if strings.HasPrefix(line, "//") { - lines = append(lines, line[len("//"):]) - continue - } - break - } - if err := scanner.Err(); err != nil { - return nil, err - } - lines = lines[:end] - - // Pass 2: Process each line in the run. - var tagLines []tagLine - for _, line := range lines { - fields := strings.Fields(line) - if len(fields) > 0 && fields[0] == "+build" { - tagLines = append(tagLines, parseTagsInGroups(fields[1:])) - } - } - return tagLines, nil -} - -func parseTagsInGroups(groups []string) tagLine { - var l tagLine - for _, g := range groups { - l = append(l, tagGroup(strings.Split(g, ","))) - } - return l -} - -func isOSArchSpecific(info fileInfo, cgoTags tagLine) (osSpecific, archSpecific bool) { - if info.goos != "" { - osSpecific = true - } - if info.goarch != "" { - archSpecific = true - } - lines := info.tags - if len(cgoTags) > 0 { - lines = append(lines, cgoTags) - } - for _, line := range lines { - for _, group := range line { - for _, tag := range group { - if strings.HasPrefix(tag, "!") { - tag = tag[1:] - } - _, osOk := rule.KnownOSSet[tag] - if osOk { - osSpecific = true - } - _, archOk := rule.KnownArchSet[tag] - if archOk { - archSpecific = true - } - } - } - } - return osSpecific, archSpecific -} - -// checkConstraints determines whether build constraints are satisfied on -// a given platform. -// -// The first few arguments describe the platform. genericTags is the set -// of build tags that are true on all platforms. os and arch are the platform -// GOOS and GOARCH strings. If os or arch is empty, checkConstraints will -// return false in the presence of OS and architecture constraints, even -// if they are negated. -// -// The remaining arguments describe the file being tested. All of these may -// be empty or nil. osSuffix and archSuffix are filename suffixes. fileTags -// is a list tags from +build comments found near the top of the file. cgoTags -// is an extra set of tags in a #cgo directive. -func checkConstraints(c *config.Config, os, arch, osSuffix, archSuffix string, fileTags []tagLine, cgoTags tagLine) bool { - if osSuffix != "" && osSuffix != os || archSuffix != "" && archSuffix != arch { - return false - } - for _, l := range fileTags { - if !l.check(c, os, arch) { - return false - } - } - if len(cgoTags) > 0 && !cgoTags.check(c, os, arch) { - return false - } - return true -} - -// isIgnoredTag returns whether the tag is "cgo" or is a release tag. -// Release tags match the pattern "go[0-9]\.[0-9]+". -// Gazelle won't consider whether an ignored tag is satisfied when evaluating -// build constraints for a file. -func isIgnoredTag(tag string) bool { - if tag == "cgo" || tag == "race" || tag == "msan" { - return true - } - if len(tag) < 5 || !strings.HasPrefix(tag, "go") { - return false - } - if tag[2] < '0' || tag[2] > '9' || tag[3] != '.' { - return false - } - for _, c := range tag[4:] { - if c < '0' || c > '9' { - return false - } - } - return true -} - -// protoFileInfo extracts metadata from a proto file. 
The proto extension -// already "parses" these and stores metadata in proto.FileInfo, so this is -// just processing relevant options. -func protoFileInfo(path_ string, protoInfo proto.FileInfo) fileInfo { - info := fileNameInfo(path_) - - // Look for "option go_package". If there's no / in the package option, then - // it's just a simple package name, not a full import path. - for _, opt := range protoInfo.Options { - if opt.Key != "go_package" { - continue - } - if strings.LastIndexByte(opt.Value, '/') == -1 { - info.packageName = opt.Value - } else { - if i := strings.LastIndexByte(opt.Value, ';'); i != -1 { - info.importPath = opt.Value[:i] - info.packageName = opt.Value[i+1:] - } else { - info.importPath = opt.Value - info.packageName = path.Base(opt.Value) - } - } - } - - // Set the Go package name from the proto package name if there was no - // option go_package. - if info.packageName == "" && protoInfo.PackageName != "" { - info.packageName = strings.Replace(protoInfo.PackageName, ".", "_", -1) - } - - info.imports = protoInfo.Imports - info.hasServices = protoInfo.HasServices - return info -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fix.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fix.go deleted file mode 100644 index 88d99963e1..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/fix.go +++ /dev/null @@ -1,253 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package golang - -import ( - "log" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/language/proto" - "github.com/bazelbuild/bazel-gazelle/rule" - bzl "github.com/bazelbuild/buildtools/build" -) - -func (_ *goLang) Fix(c *config.Config, f *rule.File) { - migrateLibraryEmbed(c, f) - migrateGrpcCompilers(c, f) - flattenSrcs(c, f) - squashCgoLibrary(c, f) - squashXtest(c, f) - removeLegacyProto(c, f) - removeLegacyGazelle(c, f) -} - -// migrateLibraryEmbed converts "library" attributes to "embed" attributes, -// preserving comments. This only applies to Go rules, and only if there is -// no keep comment on "library" and no existing "embed" attribute. -func migrateLibraryEmbed(c *config.Config, f *rule.File) { - for _, r := range f.Rules { - if !isGoRule(r.Kind()) { - continue - } - libExpr := r.Attr("library") - if libExpr == nil || rule.ShouldKeep(libExpr) || r.Attr("embed") != nil { - continue - } - r.DelAttr("library") - r.SetAttr("embed", &bzl.ListExpr{List: []bzl.Expr{libExpr}}) - } -} - -// migrateGrpcCompilers converts "go_grpc_library" rules into "go_proto_library" -// rules with a "compilers" attribute. 
-func migrateGrpcCompilers(c *config.Config, f *rule.File) { - for _, r := range f.Rules { - if r.Kind() != "go_grpc_library" || r.ShouldKeep() || r.Attr("compilers") != nil { - continue - } - r.SetKind("go_proto_library") - r.SetAttr("compilers", []string{grpcCompilerLabel}) - } -} - -// squashCgoLibrary removes cgo_library rules with the default name and -// merges their attributes with go_library with the default name. If no -// go_library rule exists, a new one will be created. -// -// Note that the library attribute is disregarded, so cgo_library and -// go_library attributes will be squashed even if the cgo_library was unlinked. -// MergeFile will remove unused values and attributes later. -func squashCgoLibrary(c *config.Config, f *rule.File) { - // Find the default cgo_library and go_library rules. - var cgoLibrary, goLibrary *rule.Rule - for _, r := range f.Rules { - if r.Kind() == "cgo_library" && r.Name() == "cgo_default_library" && !r.ShouldKeep() { - if cgoLibrary != nil { - log.Printf("%s: when fixing existing file, multiple cgo_library rules with default name found", f.Path) - continue - } - cgoLibrary = r - continue - } - if r.Kind() == "go_library" && r.Name() == defaultLibName { - if goLibrary != nil { - log.Printf("%s: when fixing existing file, multiple go_library rules with default name referencing cgo_library found", f.Path) - } - goLibrary = r - continue - } - } - - if cgoLibrary == nil { - return - } - if !c.ShouldFix { - log.Printf("%s: cgo_library is deprecated. Run 'gazelle fix' to squash with go_library.", f.Path) - return - } - - if goLibrary == nil { - cgoLibrary.SetKind("go_library") - cgoLibrary.SetName(defaultLibName) - cgoLibrary.SetAttr("cgo", true) - return - } - - if err := rule.SquashRules(cgoLibrary, goLibrary, f.Path); err != nil { - log.Print(err) - return - } - goLibrary.DelAttr("embed") - goLibrary.SetAttr("cgo", true) - cgoLibrary.Delete() -} - -// squashXtest removes go_test rules with the default external name and merges -// their attributes with a go_test rule with the default internal name. If -// no internal go_test rule exists, a new one will be created (effectively -// renaming the old rule). -func squashXtest(c *config.Config, f *rule.File) { - // Search for internal and external tests. - var itest, xtest *rule.Rule - for _, r := range f.Rules { - if r.Kind() != "go_test" { - continue - } - if r.Name() == defaultTestName { - itest = r - } else if r.Name() == "go_default_xtest" { - xtest = r - } - } - - if xtest == nil || xtest.ShouldKeep() || (itest != nil && itest.ShouldKeep()) { - return - } - if !c.ShouldFix { - if itest == nil { - log.Printf("%s: go_default_xtest is no longer necessary. Run 'gazelle fix' to rename to go_default_test.", f.Path) - } else { - log.Printf("%s: go_default_xtest is no longer necessary. Run 'gazelle fix' to squash with go_default_test.", f.Path) - } - return - } - - // If there was no internal test, we can just rename the external test. - if itest == nil { - xtest.SetName(defaultTestName) - return - } - - // Attempt to squash. - if err := rule.SquashRules(xtest, itest, f.Path); err != nil { - log.Print(err) - return - } - xtest.Delete() -} - -// flattenSrcs transforms srcs attributes structured as concatenations of -// lists and selects (generated from PlatformStrings; see -// extractPlatformStringsExprs for matching details) into a sorted, -// de-duplicated list. Comments are accumulated and de-duplicated across -// duplicate expressions. 
-func flattenSrcs(c *config.Config, f *rule.File) { - for _, r := range f.Rules { - if !isGoRule(r.Kind()) { - continue - } - oldSrcs := r.Attr("srcs") - if oldSrcs == nil { - continue - } - flatSrcs := rule.FlattenExpr(oldSrcs) - if flatSrcs != oldSrcs { - r.SetAttr("srcs", flatSrcs) - } - } -} - -// removeLegacyProto removes uses of the old proto rules. It deletes loads -// from go_proto_library.bzl. It deletes proto filegroups. It removes -// go_proto_library attributes which are no longer recognized. New rules -// are generated in place of the deleted rules, but attributes and comments -// are not migrated. -func removeLegacyProto(c *config.Config, f *rule.File) { - // Don't fix if the proto mode was set to something other than the default. - if pcMode := getProtoMode(c); pcMode != proto.DefaultMode { - return - } - - // Scan for definitions to delete. - var protoLoads []*rule.Load - for _, l := range f.Loads { - if l.Name() == "@io_bazel_rules_go//proto:go_proto_library.bzl" { - protoLoads = append(protoLoads, l) - } - } - var protoFilegroups, protoRules []*rule.Rule - for _, r := range f.Rules { - if r.Kind() == "filegroup" && r.Name() == legacyProtoFilegroupName { - protoFilegroups = append(protoFilegroups, r) - } - if r.Kind() == "go_proto_library" { - protoRules = append(protoRules, r) - } - } - if len(protoLoads)+len(protoFilegroups) == 0 { - return - } - if !c.ShouldFix { - log.Printf("%s: go_proto_library.bzl is deprecated. Run 'gazelle fix' to replace old rules.", f.Path) - return - } - - // Delete legacy proto loads and filegroups. Only delete go_proto_library - // rules if we deleted a load. - for _, l := range protoLoads { - l.Delete() - } - for _, r := range protoFilegroups { - r.Delete() - } - if len(protoLoads) > 0 { - for _, r := range protoRules { - r.Delete() - } - } -} - -// removeLegacyGazelle removes loads of the "gazelle" macro from -// @io_bazel_rules_go//go:def.bzl. The definition has moved to -// @bazel_gazelle//:def.bzl, and the old one will be deleted soon. -func removeLegacyGazelle(c *config.Config, f *rule.File) { - for _, l := range f.Loads { - if l.Name() == "@io_bazel_rules_go//go:def.bzl" && l.Has("gazelle") { - l.Remove("gazelle") - if l.IsEmpty() { - l.Delete() - } - } - } -} - -func isGoRule(kind string) bool { - return kind == "go_library" || - kind == "go_binary" || - kind == "go_test" || - kind == "go_proto_library" || - kind == "go_grpc_library" -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/generate.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/generate.go deleted file mode 100644 index b98e41cb74..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/generate.go +++ /dev/null @@ -1,576 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package golang - -import ( - "fmt" - "go/build" - "log" - "path" - "path/filepath" - "sort" - "strings" - "sync" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/language" - "github.com/bazelbuild/bazel-gazelle/language/proto" - "github.com/bazelbuild/bazel-gazelle/pathtools" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func (gl *goLang) GenerateRules(args language.GenerateArgs) language.GenerateResult { - // Extract information about proto files. We need this to exclude .pb.go - // files and generate go_proto_library rules. - c := args.Config - gc := getGoConfig(c) - pcMode := getProtoMode(c) - var protoRuleNames []string - protoPackages := make(map[string]proto.Package) - protoFileInfo := make(map[string]proto.FileInfo) - for _, r := range args.OtherGen { - if r.Kind() != "proto_library" { - continue - } - pkg := r.PrivateAttr(proto.PackageKey).(proto.Package) - protoPackages[r.Name()] = pkg - for name, info := range pkg.Files { - protoFileInfo[name] = info - } - protoRuleNames = append(protoRuleNames, r.Name()) - } - sort.Strings(protoRuleNames) - var emptyProtoRuleNames []string - for _, r := range args.OtherEmpty { - if r.Kind() == "proto_library" { - emptyProtoRuleNames = append(emptyProtoRuleNames, r.Name()) - } - } - - // If proto rule generation is enabled, exclude .pb.go files that correspond - // to any .proto files present. - regularFiles := append([]string{}, args.RegularFiles...) - genFiles := append([]string{}, args.GenFiles...) - if !pcMode.ShouldIncludePregeneratedFiles() { - keep := func(f string) bool { - if strings.HasSuffix(f, ".pb.go") { - _, ok := protoFileInfo[strings.TrimSuffix(f, ".pb.go")+".proto"] - return !ok - } - return true - } - filterFiles(®ularFiles, keep) - filterFiles(&genFiles, keep) - } - - // Split regular files into files which can determine the package name and - // import path and other files. - var goFiles, otherFiles []string - for _, f := range regularFiles { - if strings.HasSuffix(f, ".go") { - goFiles = append(goFiles, f) - } else { - otherFiles = append(otherFiles, f) - } - } - - // Look for a subdirectory named testdata. Only treat it as data if it does - // not contain a buildable package. - var hasTestdata bool - for _, sub := range args.Subdirs { - if sub == "testdata" { - hasTestdata = !gl.goPkgRels[path.Join(args.Rel, "testdata")] - break - } - } - - // Build a set of packages from files in this directory. - goPackageMap, goFilesWithUnknownPackage := buildPackages(c, args.Dir, args.Rel, goFiles, hasTestdata) - - // Select a package to generate rules for. If there is no package, create - // an empty package so we can generate empty rules. - var protoName string - pkg, err := selectPackage(c, args.Dir, goPackageMap) - if err != nil { - if _, ok := err.(*build.NoGoError); ok { - if len(protoPackages) == 1 { - for name, ppkg := range protoPackages { - pkg = &goPackage{ - name: goProtoPackageName(ppkg), - importPath: goProtoImportPath(gc, ppkg, args.Rel), - proto: protoTargetFromProtoPackage(name, ppkg), - } - protoName = name - break - } - } else { - pkg = emptyPackage(c, args.Dir, args.Rel) - } - } else { - log.Print(err) - } - } - - // Try to link the selected package with a proto package. 
- if pkg != nil { - if pkg.importPath == "" { - if err := pkg.inferImportPath(c); err != nil && pkg.firstGoFile() != "" { - inferImportPathErrorOnce.Do(func() { log.Print(err) }) - } - } - for _, name := range protoRuleNames { - ppkg := protoPackages[name] - if pkg.importPath == goProtoImportPath(gc, ppkg, args.Rel) { - protoName = name - pkg.proto = protoTargetFromProtoPackage(name, ppkg) - break - } - } - } - - // Generate rules for proto packages. These should come before the other - // Go rules. - g := newGenerator(c, args.File, args.Rel) - var res language.GenerateResult - var rules []*rule.Rule - var protoEmbed string - for _, name := range protoRuleNames { - ppkg := protoPackages[name] - var rs []*rule.Rule - if name == protoName { - protoEmbed, rs = g.generateProto(pcMode, pkg.proto, pkg.importPath) - } else { - target := protoTargetFromProtoPackage(name, ppkg) - importPath := goProtoImportPath(gc, ppkg, args.Rel) - _, rs = g.generateProto(pcMode, target, importPath) - } - rules = append(rules, rs...) - } - for _, name := range emptyProtoRuleNames { - goProtoName := strings.TrimSuffix(name, "_proto") + "_go_proto" - res.Empty = append(res.Empty, rule.NewRule("go_proto_library", goProtoName)) - } - if pkg != nil && pcMode == proto.PackageMode && pkg.firstGoFile() == "" { - // In proto package mode, don't generate a go_library embedding a - // go_proto_library unless there are actually go files. - protoEmbed = "" - } - - // Complete the Go package and generate rules for that. - if pkg != nil { - // Add files with unknown packages. This happens when there are parse - // or I/O errors. We should keep the file in the srcs list and let the - // compiler deal with the error. - cgo := pkg.haveCgo() - for _, info := range goFilesWithUnknownPackage { - if err := pkg.addFile(c, info, cgo); err != nil { - log.Print(err) - } - } - - // Process the other static files. - for _, file := range otherFiles { - info := otherFileInfo(filepath.Join(args.Dir, file)) - if err := pkg.addFile(c, info, cgo); err != nil { - log.Print(err) - } - } - - // Process generated files. Note that generated files may have the same names - // as static files. Bazel will use the generated files, but we will look at - // the content of static files, assuming they will be the same. - regularFileSet := make(map[string]bool) - for _, f := range regularFiles { - regularFileSet[f] = true - } - for _, f := range genFiles { - if regularFileSet[f] { - continue - } - info := fileNameInfo(filepath.Join(args.Dir, f)) - if err := pkg.addFile(c, info, cgo); err != nil { - log.Print(err) - } - } - - // Generate Go rules. - if protoName == "" { - // Empty proto rules for deletion. - _, rs := g.generateProto(pcMode, pkg.proto, pkg.importPath) - rules = append(rules, rs...) 
- } - lib := g.generateLib(pkg, protoEmbed) - var libName string - if !lib.IsEmpty(goKinds[lib.Kind()]) { - libName = lib.Name() - } - rules = append(rules, lib) - rules = append(rules, - g.generateBin(pkg, libName), - g.generateTest(pkg, libName)) - } - - for _, r := range rules { - if r.IsEmpty(goKinds[r.Kind()]) { - res.Empty = append(res.Empty, r) - } else { - res.Gen = append(res.Gen, r) - res.Imports = append(res.Imports, r.PrivateAttr(config.GazelleImportsKey)) - } - } - - if args.File != nil || len(res.Gen) > 0 { - gl.goPkgRels[args.Rel] = true - } else { - for _, sub := range args.Subdirs { - if gl.goPkgRels[path.Join(args.Rel, sub)] { - gl.goPkgRels[args.Rel] = true - break - } - } - } - - return res -} - -func filterFiles(files *[]string, pred func(string) bool) { - w := 0 - for r := 0; r < len(*files); r++ { - f := (*files)[r] - if pred(f) { - (*files)[w] = f - w++ - } - } - *files = (*files)[:w] -} - -func buildPackages(c *config.Config, dir, rel string, goFiles []string, hasTestdata bool) (packageMap map[string]*goPackage, goFilesWithUnknownPackage []fileInfo) { - // Process .go and .proto files first, since these determine the package name. - packageMap = make(map[string]*goPackage) - for _, f := range goFiles { - path := filepath.Join(dir, f) - info := goFileInfo(path, rel) - if info.packageName == "" { - goFilesWithUnknownPackage = append(goFilesWithUnknownPackage, info) - continue - } - if info.packageName == "documentation" { - // go/build ignores this package - continue - } - - if _, ok := packageMap[info.packageName]; !ok { - packageMap[info.packageName] = &goPackage{ - name: info.packageName, - dir: dir, - rel: rel, - hasTestdata: hasTestdata, - } - } - if err := packageMap[info.packageName].addFile(c, info, false); err != nil { - log.Print(err) - } - } - return packageMap, goFilesWithUnknownPackage -} - -var inferImportPathErrorOnce sync.Once - -// selectPackages selects one Go packages out of the buildable packages found -// in a directory. If multiple packages are found, it returns the package -// whose name matches the directory if such a package exists. -func selectPackage(c *config.Config, dir string, packageMap map[string]*goPackage) (*goPackage, error) { - buildablePackages := make(map[string]*goPackage) - for name, pkg := range packageMap { - if pkg.isBuildable(c) { - buildablePackages[name] = pkg - } - } - - if len(buildablePackages) == 0 { - return nil, &build.NoGoError{Dir: dir} - } - - if len(buildablePackages) == 1 { - for _, pkg := range buildablePackages { - return pkg, nil - } - } - - if pkg, ok := buildablePackages[defaultPackageName(c, dir)]; ok { - return pkg, nil - } - - err := &build.MultiplePackageError{Dir: dir} - for name, pkg := range buildablePackages { - // Add the first file for each package for the error message. - // Error() method expects these lists to be the same length. File - // lists must be non-empty. These lists are only created by - // buildPackage for packages with .go files present. 
- err.Packages = append(err.Packages, name) - err.Files = append(err.Files, pkg.firstGoFile()) - } - return nil, err -} - -func emptyPackage(c *config.Config, dir, rel string) *goPackage { - pkg := &goPackage{ - name: defaultPackageName(c, dir), - dir: dir, - rel: rel, - } - pkg.inferImportPath(c) - return pkg -} - -func defaultPackageName(c *config.Config, rel string) string { - gc := getGoConfig(c) - return pathtools.RelBaseName(rel, gc.prefix, "") -} - -// hasDefaultVisibility returns whether oldFile contains a "package" rule with -// a "default_visibility" attribute. Rules generated by Gazelle should not -// have their own visibility attributes if this is the case. -func hasDefaultVisibility(oldFile *rule.File) bool { - for _, r := range oldFile.Rules { - if r.Kind() == "package" && r.Attr("default_visibility") != nil { - return true - } - } - return false -} - -// checkInternalVisibility overrides the given visibility if the package is -// internal. -func checkInternalVisibility(rel, visibility string) string { - if i := strings.LastIndex(rel, "/internal/"); i >= 0 { - visibility = fmt.Sprintf("//%s:__subpackages__", rel[:i]) - } else if strings.HasPrefix(rel, "internal/") { - visibility = "//:__subpackages__" - } - return visibility -} - -type generator struct { - c *config.Config - rel string - shouldSetVisibility bool -} - -func newGenerator(c *config.Config, f *rule.File, rel string) *generator { - shouldSetVisibility := f == nil || !hasDefaultVisibility(f) - return &generator{c: c, rel: rel, shouldSetVisibility: shouldSetVisibility} -} - -func (g *generator) generateProto(mode proto.Mode, target protoTarget, importPath string) (string, []*rule.Rule) { - if !mode.ShouldGenerateRules() && mode != proto.LegacyMode { - // Don't create or delete proto rules in this mode. Any existing rules - // are likely hand-written. 
- return "", nil - } - - filegroupName := legacyProtoFilegroupName - protoName := target.name - if protoName == "" { - importPath := inferImportPath(getGoConfig(g.c), g.rel) - protoName = proto.RuleName(importPath) - } - goProtoName := strings.TrimSuffix(protoName, "_proto") + "_go_proto" - visibility := []string{checkInternalVisibility(g.rel, "//visibility:public")} - - if mode == proto.LegacyMode { - filegroup := rule.NewRule("filegroup", filegroupName) - if target.sources.isEmpty() { - return "", []*rule.Rule{filegroup} - } - filegroup.SetAttr("srcs", target.sources.build()) - if g.shouldSetVisibility { - filegroup.SetAttr("visibility", visibility) - } - return "", []*rule.Rule{filegroup} - } - - if target.sources.isEmpty() { - return "", []*rule.Rule{ - rule.NewRule("filegroup", filegroupName), - rule.NewRule("go_proto_library", goProtoName), - } - } - - goProtoLibrary := rule.NewRule("go_proto_library", goProtoName) - goProtoLibrary.SetAttr("proto", ":"+protoName) - g.setImportAttrs(goProtoLibrary, importPath) - if target.hasServices { - goProtoLibrary.SetAttr("compilers", []string{"@io_bazel_rules_go//proto:go_grpc"}) - } - if g.shouldSetVisibility { - goProtoLibrary.SetAttr("visibility", visibility) - } - goProtoLibrary.SetPrivateAttr(config.GazelleImportsKey, target.imports.build()) - return goProtoName, []*rule.Rule{goProtoLibrary} -} - -func (g *generator) generateLib(pkg *goPackage, embed string) *rule.Rule { - goLibrary := rule.NewRule("go_library", defaultLibName) - if !pkg.library.sources.hasGo() && embed == "" { - return goLibrary // empty - } - var visibility string - if pkg.isCommand() { - // Libraries made for a go_binary should not be exposed to the public. - visibility = "//visibility:private" - } else { - visibility = checkInternalVisibility(pkg.rel, "//visibility:public") - } - g.setCommonAttrs(goLibrary, pkg.rel, visibility, pkg.library, embed) - g.setImportAttrs(goLibrary, pkg.importPath) - return goLibrary -} - -func (g *generator) generateBin(pkg *goPackage, library string) *rule.Rule { - name := pathtools.RelBaseName(pkg.rel, getGoConfig(g.c).prefix, g.c.RepoRoot) - goBinary := rule.NewRule("go_binary", name) - if !pkg.isCommand() || pkg.binary.sources.isEmpty() && library == "" { - return goBinary // empty - } - visibility := checkInternalVisibility(pkg.rel, "//visibility:public") - g.setCommonAttrs(goBinary, pkg.rel, visibility, pkg.binary, library) - return goBinary -} - -func (g *generator) generateTest(pkg *goPackage, library string) *rule.Rule { - goTest := rule.NewRule("go_test", defaultTestName) - if !pkg.test.sources.hasGo() { - return goTest // empty - } - g.setCommonAttrs(goTest, pkg.rel, "", pkg.test, library) - if pkg.hasTestdata { - goTest.SetAttr("data", rule.GlobValue{Patterns: []string{"testdata/**"}}) - } - return goTest -} - -func (g *generator) setCommonAttrs(r *rule.Rule, pkgRel, visibility string, target goTarget, embed string) { - if !target.sources.isEmpty() { - r.SetAttr("srcs", target.sources.buildFlat()) - } - if target.cgo { - r.SetAttr("cgo", true) - } - if !target.clinkopts.isEmpty() { - r.SetAttr("clinkopts", g.options(target.clinkopts.build(), pkgRel)) - } - if !target.copts.isEmpty() { - r.SetAttr("copts", g.options(target.copts.build(), pkgRel)) - } - if g.shouldSetVisibility && visibility != "" { - r.SetAttr("visibility", []string{visibility}) - } - if embed != "" { - r.SetAttr("embed", []string{":" + embed}) - } - r.SetPrivateAttr(config.GazelleImportsKey, target.imports.build()) -} - -func (g *generator) setImportAttrs(r 
*rule.Rule, importPath string) { - r.SetAttr("importpath", importPath) - goConf := getGoConfig(g.c) - if goConf.importMapPrefix != "" { - fromPrefixRel := pathtools.TrimPrefix(g.rel, goConf.importMapPrefixRel) - importMap := path.Join(goConf.importMapPrefix, fromPrefixRel) - if importMap != importPath { - r.SetAttr("importmap", importMap) - } - } -} - -var ( - // shortOptPrefixes are strings that come at the beginning of an option - // argument that includes a path, e.g., -Ifoo/bar. - shortOptPrefixes = []string{"-I", "-L", "-F"} - - // longOptPrefixes are separate arguments that come before a path argument, - // e.g., -iquote foo/bar. - longOptPrefixes = []string{"-I", "-L", "-F", "-iquote", "-isystem"} -) - -// options transforms package-relative paths in cgo options into repository- -// root-relative paths that Bazel can understand. For example, if a cgo file -// in //foo declares an include flag in its copts: "-Ibar", this method -// will transform that flag into "-Ifoo/bar". -func (g *generator) options(opts rule.PlatformStrings, pkgRel string) rule.PlatformStrings { - fixPath := func(opt string) string { - if strings.HasPrefix(opt, "/") { - return opt - } - return path.Clean(path.Join(pkgRel, opt)) - } - - fixGroups := func(groups []string) ([]string, error) { - fixedGroups := make([]string, len(groups)) - for i, group := range groups { - opts := strings.Split(group, optSeparator) - fixedOpts := make([]string, len(opts)) - isPath := false - for j, opt := range opts { - if isPath { - opt = fixPath(opt) - isPath = false - goto next - } - - for _, short := range shortOptPrefixes { - if strings.HasPrefix(opt, short) && len(opt) > len(short) { - opt = short + fixPath(opt[len(short):]) - goto next - } - } - - for _, long := range longOptPrefixes { - if opt == long { - isPath = true - goto next - } - } - - next: - fixedOpts[j] = escapeOption(opt) - } - fixedGroups[i] = strings.Join(fixedOpts, " ") - } - - return fixedGroups, nil - } - - opts, errs := opts.MapSlice(fixGroups) - if errs != nil { - log.Panicf("unexpected error when transforming options with pkg %q: %v", pkgRel, errs) - } - return opts -} - -func escapeOption(opt string) string { - return strings.NewReplacer( - `\`, `\\`, - `'`, `\'`, - `"`, `\"`, - ` `, `\ `, - "\t", "\\\t", - "\n", "\\\n", - "\r", "\\\r", - ).Replace(opt) -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/kinds.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/kinds.go deleted file mode 100644 index 633b194df6..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/kinds.go +++ /dev/null @@ -1,147 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package golang - -import "github.com/bazelbuild/bazel-gazelle/rule" - -var goKinds = map[string]rule.KindInfo{ - "filegroup": { - NonEmptyAttrs: map[string]bool{"srcs": true}, - MergeableAttrs: map[string]bool{"srcs": true}, - }, - "go_binary": { - MatchAny: true, - NonEmptyAttrs: map[string]bool{ - "deps": true, - "embed": true, - "srcs": true, - }, - SubstituteAttrs: map[string]bool{"embed": true}, - MergeableAttrs: map[string]bool{ - "cgo": true, - "clinkopts": true, - "copts": true, - "embed": true, - "srcs": true, - }, - ResolveAttrs: map[string]bool{"deps": true}, - }, - "go_library": { - MatchAttrs: []string{"importpath"}, - NonEmptyAttrs: map[string]bool{ - "deps": true, - "embed": true, - "srcs": true, - }, - SubstituteAttrs: map[string]bool{ - "embed": true, - }, - MergeableAttrs: map[string]bool{ - "cgo": true, - "clinkopts": true, - "copts": true, - "embed": true, - "importmap": true, - "importpath": true, - "srcs": true, - }, - ResolveAttrs: map[string]bool{"deps": true}, - }, - "go_proto_library": { - MatchAttrs: []string{"importpath"}, - NonEmptyAttrs: map[string]bool{ - "deps": true, - "embed": true, - "proto": true, - "srcs": true, - }, - SubstituteAttrs: map[string]bool{"proto": true}, - MergeableAttrs: map[string]bool{ - "srcs": true, - "importpath": true, - "importmap": true, - "cgo": true, - "clinkopts": true, - "copts": true, - "embed": true, - "proto": true, - }, - ResolveAttrs: map[string]bool{"deps": true}, - }, - "go_repository": { - MatchAttrs: []string{"importpath"}, - NonEmptyAttrs: nil, // never empty - MergeableAttrs: map[string]bool{ - "commit": true, - "importpath": true, - "remote": true, - "sha256": true, - "strip_prefix": true, - "tag": true, - "type": true, - "urls": true, - "vcs": true, - }, - }, - "go_test": { - NonEmptyAttrs: map[string]bool{ - "deps": true, - "embed": true, - "srcs": true, - }, - MergeableAttrs: map[string]bool{ - "cgo": true, - "clinkopts": true, - "copts": true, - "embed": true, - "srcs": true, - }, - ResolveAttrs: map[string]bool{"deps": true}, - }, -} - -var goLoads = []rule.LoadInfo{ - { - Name: "@io_bazel_rules_go//go:def.bzl", - Symbols: []string{ - "cgo_library", - "go_binary", - "go_library", - "go_prefix", - "go_repository", - "go_test", - }, - }, { - Name: "@io_bazel_rules_go//proto:def.bzl", - Symbols: []string{ - "go_grpc_library", - "go_proto_library", - }, - }, { - Name: "@bazel_gazelle//:deps.bzl", - Symbols: []string{ - "go_repository", - }, - After: []string{ - "go_rules_dependencies", - "go_register_toolchains", - "gazelle_dependencies", - }, - }, -} - -func (_ *goLang) Kinds() map[string]rule.KindInfo { return goKinds } -func (_ *goLang) Loads() []rule.LoadInfo { return goLoads } diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_go_imports.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_go_imports.go deleted file mode 100644 index 584dec47f0..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_go_imports.go +++ /dev/null @@ -1,157 +0,0 @@ -// Generated by internal/language/proto/gen/gen_known_imports.go -// From internal/language/proto/proto.csv - -package golang - -import "github.com/bazelbuild/bazel-gazelle/label" - -var knownGoProtoImports = map[string]label.Label{ - - "github.com/golang/protobuf/ptypes/any": label.New("io_bazel_rules_go", "proto/wkt", "any_go_proto"), - "google.golang.org/genproto/protobuf/api": label.New("io_bazel_rules_go", "proto/wkt", "api_go_proto"), - 
"github.com/golang/protobuf/protoc-gen-go/plugin": label.New("io_bazel_rules_go", "proto/wkt", "compiler_plugin_go_proto"), - "github.com/golang/protobuf/protoc-gen-go/descriptor": label.New("io_bazel_rules_go", "proto/wkt", "descriptor_go_proto"), - "github.com/golang/protobuf/ptypes/duration": label.New("io_bazel_rules_go", "proto/wkt", "duration_go_proto"), - "github.com/golang/protobuf/ptypes/empty": label.New("io_bazel_rules_go", "proto/wkt", "empty_go_proto"), - "google.golang.org/genproto/protobuf/field_mask": label.New("io_bazel_rules_go", "proto/wkt", "field_mask_go_proto"), - "google.golang.org/genproto/protobuf/source_context": label.New("io_bazel_rules_go", "proto/wkt", "source_context_go_proto"), - "github.com/golang/protobuf/ptypes/struct": label.New("io_bazel_rules_go", "proto/wkt", "struct_go_proto"), - "github.com/golang/protobuf/ptypes/timestamp": label.New("io_bazel_rules_go", "proto/wkt", "timestamp_go_proto"), - "google.golang.org/genproto/protobuf/ptype": label.New("io_bazel_rules_go", "proto/wkt", "type_go_proto"), - "github.com/golang/protobuf/ptypes/wrappers": label.New("io_bazel_rules_go", "proto/wkt", "wrappers_go_proto"), - "google.golang.org/genproto/googleapis/api/annotations": label.New("go_googleapis", "google/api", "annotations_go_proto"), - "google.golang.org/genproto/googleapis/api/serviceconfig": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google.golang.org/genproto/googleapis/api/configchange": label.New("go_googleapis", "google/api", "configchange_go_proto"), - "google.golang.org/genproto/googleapis/api/distribution": label.New("go_googleapis", "google/api", "distribution_go_proto"), - "google.golang.org/genproto/googleapis/api": label.New("go_googleapis", "google/api", "api_go_proto"), - "google.golang.org/genproto/googleapis/api/expr/v1alpha1": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google.golang.org/genproto/googleapis/api/expr/v1beta1": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_go_proto"), - "google.golang.org/genproto/googleapis/api/httpbody": label.New("go_googleapis", "google/api", "httpbody_go_proto"), - "google.golang.org/genproto/googleapis/api/label": label.New("go_googleapis", "google/api", "label_go_proto"), - "google.golang.org/genproto/googleapis/api/metric": label.New("go_googleapis", "google/api", "metric_go_proto"), - "google.golang.org/genproto/googleapis/api/monitoredres": label.New("go_googleapis", "google/api", "monitoredres_go_proto"), - "google.golang.org/genproto/googleapis/api/servicecontrol/v1": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google.golang.org/genproto/googleapis/api/servicemanagement/v1": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_go_proto"), - "google.golang.org/genproto/googleapis/appengine/legacy": label.New("go_googleapis", "google/appengine/legacy", "legacy_go_proto"), - "google.golang.org/genproto/googleapis/appengine/logging/v1": label.New("go_googleapis", "google/appengine/logging/v1", "logging_go_proto"), - "google.golang.org/genproto/googleapis/appengine/v1": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1": label.New("go_googleapis", "google/assistant/embedded/v1alpha1", "embedded_go_proto"), - "google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2": label.New("go_googleapis", "google/assistant/embedded/v1alpha2", 
"embedded_go_proto"), - "google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"), - "google.golang.org/genproto/googleapis/bigtable/admin/table/v1": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"), - "google.golang.org/genproto/googleapis/bigtable/admin/v2": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"), - "google.golang.org/genproto/googleapis/bigtable/v1": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"), - "google.golang.org/genproto/googleapis/bigtable/v2": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"), - "google.golang.org/genproto/googleapis/bytestream": label.New("go_googleapis", "google/bytestream", "bytestream_go_proto"), - "google.golang.org/genproto/googleapis/cloud/asset/v1beta1": label.New("go_googleapis", "google/cloud/asset/v1beta1", "asset_go_proto"), - "google.golang.org/genproto/googleapis/cloud/audit": label.New("go_googleapis", "google/cloud/audit", "audit_go_proto"), - "google.golang.org/genproto/googleapis/cloud/automl/v1beta1": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"), - "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_go_proto"), - "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_go_proto"), - "google.golang.org/genproto/googleapis/cloud/billing/v1": label.New("go_googleapis", "google/cloud/billing/v1", "billing_go_proto"), - "google.golang.org/genproto/googleapis/cloud/dataproc/v1": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"), - "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"), - "google.golang.org/genproto/googleapis/cloud/dialogflow/v2": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google.golang.org/genproto/googleapis/cloud/functions/v1beta2": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"), - "google.golang.org/genproto/googleapis/cloud/iot/v1": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"), - "google.golang.org/genproto/googleapis/cloud/kms/v1": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"), - "google.golang.org/genproto/googleapis/cloud/language/v1": label.New("go_googleapis", "google/cloud/language/v1", "language_go_proto"), - "google.golang.org/genproto/googleapis/cloud/language/v1beta1": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_go_proto"), - "google.golang.org/genproto/googleapis/cloud/language/v1beta2": label.New("go_googleapis", "google/cloud/language/v1beta2", "language_go_proto"), - "google.golang.org/genproto/googleapis/cloud/location": label.New("go_googleapis", "google/cloud/location", "location_go_proto"), - "google.golang.org/genproto/googleapis/cloud/ml/v1": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"), - 
"google.golang.org/genproto/googleapis/cloud/oslogin/common": label.New("go_googleapis", "google/cloud/oslogin/common", "common_go_proto"), - "google.golang.org/genproto/googleapis/cloud/oslogin/v1": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_go_proto"), - "google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_go_proto"), - "google.golang.org/genproto/googleapis/cloud/oslogin/v1beta": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_go_proto"), - "google.golang.org/genproto/googleapis/cloud/redis/v1": label.New("go_googleapis", "google/cloud/redis/v1", "redis_go_proto"), - "google.golang.org/genproto/googleapis/cloud/redis/v1beta1": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_go_proto"), - "google.golang.org/genproto/googleapis/cloud/resourcemanager/v2": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_go_proto"), - "google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"), - "google.golang.org/genproto/googleapis/cloud/speech/v1": label.New("go_googleapis", "google/cloud/speech/v1", "speech_go_proto"), - "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_go_proto"), - "google.golang.org/genproto/googleapis/cloud/support/common": label.New("go_googleapis", "google/cloud/support", "common_go_proto"), - "google.golang.org/genproto/googleapis/cloud/support/v1alpha1": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_go_proto"), - "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"), - "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_go_proto"), - "google.golang.org/genproto/googleapis/cloud/texttospeech/v1": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_go_proto"), - "google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_go_proto"), - "google.golang.org/genproto/googleapis/cloud/videointelligence/v1": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_go_proto"), - "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_go_proto"), - "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_go_proto"), - "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_go_proto"), - "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1p2beta1", "videointelligence_go_proto"), - "google.golang.org/genproto/googleapis/cloud/vision/v1": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"), - "google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"), - "google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1": label.New("go_googleapis", 
"google/cloud/vision/v1p2beta1", "vision_go_proto"), - "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google.golang.org/genproto/googleapis/container/v1": label.New("go_googleapis", "google/container/v1", "container_go_proto"), - "google.golang.org/genproto/googleapis/container/v1alpha1": label.New("go_googleapis", "google/container/v1alpha1", "container_go_proto"), - "google.golang.org/genproto/googleapis/container/v1beta1": label.New("go_googleapis", "google/container/v1beta1", "container_go_proto"), - "google.golang.org/genproto/googleapis/datastore/admin/v1": label.New("go_googleapis", "google/datastore/admin/v1", "admin_go_proto"), - "google.golang.org/genproto/googleapis/datastore/admin/v1beta1": label.New("go_googleapis", "google/datastore/admin/v1beta1", "admin_go_proto"), - "google.golang.org/genproto/googleapis/datastore/v1": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"), - "google.golang.org/genproto/googleapis/datastore/v1beta3": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"), - "google.golang.org/genproto/googleapis/devtools/build/v1": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"), - "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1": label.New("go_googleapis", "google/devtools/cloudbuild/v1", "cloudbuild_go_proto"), - "google.golang.org/genproto/googleapis/devtools/clouddebugger/v2": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"), - "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"), - "google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_go_proto"), - "google.golang.org/genproto/googleapis/devtools/cloudtrace/v1": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_go_proto"), - "google.golang.org/genproto/googleapis/devtools/cloudtrace/v2": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/attestation", "attestation_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/build", "build_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/common", "common_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1", "containeranalysis_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/deployment", "deployment_go_proto"), - 
"google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/discovery", "discovery_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/grafeas": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/grafeas", "grafeas_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/image", "image_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/package", "package_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/provenance", "provenance_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/source", "source_go_proto"), - "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/vulnerability", "vulnerability_go_proto"), - "google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test": label.New("go_googleapis", "google/devtools/remoteexecution/v1test", "remoteexecution_go_proto"), - "google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"), - "google.golang.org/genproto/googleapis/devtools/resultstore/v2": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google.golang.org/genproto/googleapis/devtools/source/v1": label.New("go_googleapis", "google/devtools/source/v1", "source_go_proto"), - "google.golang.org/genproto/googleapis/devtools/sourcerepo/v1": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_go_proto"), - "google.golang.org/genproto/googleapis/example/library/v1": label.New("go_googleapis", "google/example/library/v1", "library_go_proto"), - "google.golang.org/genproto/googleapis/firestore/admin/v1beta1": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"), - "google.golang.org/genproto/googleapis/firestore/admin/v1beta2": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_go_proto"), - "google.golang.org/genproto/googleapis/firestore/v1beta1": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"), - "google.golang.org/genproto/googleapis/genomics/v1": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google.golang.org/genproto/googleapis/genomics/v1alpha2": label.New("go_googleapis", "google/genomics/v1alpha2", "genomics_go_proto"), - "google.golang.org/genproto/googleapis/home/graph/v1": label.New("go_googleapis", "google/home/graph/v1", "graph_go_proto"), - "google.golang.org/genproto/googleapis/iam/admin/v1": label.New("go_googleapis", "google/iam/admin/v1", "admin_go_proto"), - "google.golang.org/genproto/googleapis/iam/credentials/v1": label.New("go_googleapis", "google/iam/credentials/v1", "credentials_go_proto"), - "google.golang.org/genproto/googleapis/iam/v1": label.New("go_googleapis", "google/iam/v1", "iam_go_proto"), - "google.golang.org/genproto/googleapis/iam/v1/logging": label.New("go_googleapis", 
"google/iam/v1/logging", "logging_go_proto"), - "google.golang.org/genproto/googleapis/logging/type": label.New("go_googleapis", "google/logging/type", "ltype_go_proto"), - "google.golang.org/genproto/googleapis/logging/v2": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"), - "google.golang.org/genproto/googleapis/longrunning": label.New("go_googleapis", "google/longrunning", "longrunning_go_proto"), - "google.golang.org/genproto/googleapis/monitoring/v3": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google.golang.org/genproto/googleapis/privacy/dlp/v2": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_go_proto"), - "google.golang.org/genproto/googleapis/pubsub/v1": label.New("go_googleapis", "google/pubsub/v1", "pubsub_go_proto"), - "google.golang.org/genproto/googleapis/pubsub/v1beta2": label.New("go_googleapis", "google/pubsub/v1beta2", "pubsub_go_proto"), - "google.golang.org/genproto/googleapis/rpc/code": label.New("go_googleapis", "google/rpc", "code_go_proto"), - "google.golang.org/genproto/googleapis/rpc/errdetails": label.New("go_googleapis", "google/rpc", "errdetails_go_proto"), - "google.golang.org/genproto/googleapis/rpc/status": label.New("go_googleapis", "google/rpc", "status_go_proto"), - "google.golang.org/genproto/googleapis/spanner/admin/database/v1": label.New("go_googleapis", "google/spanner/admin/database/v1", "database_go_proto"), - "google.golang.org/genproto/googleapis/spanner/admin/instance/v1": label.New("go_googleapis", "google/spanner/admin/instance/v1", "instance_go_proto"), - "google.golang.org/genproto/googleapis/spanner/v1": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google.golang.org/genproto/googleapis/storagetransfer/v1": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"), - "google.golang.org/genproto/googleapis/streetview/publish/v1": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"), - "google.golang.org/genproto/googleapis/type/color": label.New("go_googleapis", "google/type", "color_go_proto"), - "google.golang.org/genproto/googleapis/type/date": label.New("go_googleapis", "google/type", "date_go_proto"), - "google.golang.org/genproto/googleapis/type/dayofweek": label.New("go_googleapis", "google/type", "dayofweek_go_proto"), - "google.golang.org/genproto/googleapis/type/latlng": label.New("go_googleapis", "google/type", "latlng_go_proto"), - "google.golang.org/genproto/googleapis/type/money": label.New("go_googleapis", "google/type", "money_go_proto"), - "google.golang.org/genproto/googleapis/type/postaladdress": label.New("go_googleapis", "google/type", "postaladdress_go_proto"), - "google.golang.org/genproto/googleapis/type/timeofday": label.New("go_googleapis", "google/type", "timeofday_go_proto"), - "google.golang.org/genproto/googleapis/watcher/v1": label.New("go_googleapis", "google/watcher/v1", "watcher_go_proto"), -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_proto_imports.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_proto_imports.go deleted file mode 100644 index 7436c3de1b..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/known_proto_imports.go +++ /dev/null @@ -1,365 +0,0 @@ -// Generated by internal/language/proto/gen/gen_known_imports.go -// From internal/language/proto/proto.csv - -package golang - -import "github.com/bazelbuild/bazel-gazelle/label" - -var 
knownProtoImports = map[string]label.Label{ - - "google/protobuf/any.proto": label.New("io_bazel_rules_go", "proto/wkt", "any_go_proto"), - "google/protobuf/api.proto": label.New("io_bazel_rules_go", "proto/wkt", "api_go_proto"), - "google/protobuf/compiler/plugin.proto": label.New("io_bazel_rules_go", "proto/wkt", "compiler_plugin_go_proto"), - "google/protobuf/descriptor.proto": label.New("io_bazel_rules_go", "proto/wkt", "descriptor_go_proto"), - "google/protobuf/duration.proto": label.New("io_bazel_rules_go", "proto/wkt", "duration_go_proto"), - "google/protobuf/empty.proto": label.New("io_bazel_rules_go", "proto/wkt", "empty_go_proto"), - "google/protobuf/field_mask.proto": label.New("io_bazel_rules_go", "proto/wkt", "field_mask_go_proto"), - "google/protobuf/source_context.proto": label.New("io_bazel_rules_go", "proto/wkt", "source_context_go_proto"), - "google/protobuf/struct.proto": label.New("io_bazel_rules_go", "proto/wkt", "struct_go_proto"), - "google/protobuf/timestamp.proto": label.New("io_bazel_rules_go", "proto/wkt", "timestamp_go_proto"), - "google/protobuf/type.proto": label.New("io_bazel_rules_go", "proto/wkt", "type_go_proto"), - "google/protobuf/wrappers.proto": label.New("io_bazel_rules_go", "proto/wkt", "wrappers_go_proto"), - "google/api/annotations.proto": label.New("go_googleapis", "google/api", "annotations_go_proto"), - "google/api/auth.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/backend.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/billing.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/config_change.proto": label.New("go_googleapis", "google/api", "configchange_go_proto"), - "google/api/consumer.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/context.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/control.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/distribution.proto": label.New("go_googleapis", "google/api", "distribution_go_proto"), - "google/api/documentation.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/endpoint.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/experimental/authorization_config.proto": label.New("go_googleapis", "google/api", "api_go_proto"), - "google/api/experimental/experimental.proto": label.New("go_googleapis", "google/api", "api_go_proto"), - "google/api/expr/v1alpha1/cel_service.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google/api/expr/v1alpha1/checked.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google/api/expr/v1alpha1/eval.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google/api/expr/v1alpha1/explain.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google/api/expr/v1alpha1/syntax.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google/api/expr/v1alpha1/value.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_go_proto"), - "google/api/expr/v1beta1/decl.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_go_proto"), - "google/api/expr/v1beta1/eval.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_go_proto"), - "google/api/expr/v1beta1/expr.proto": 
label.New("go_googleapis", "google/api/expr/v1beta1", "expr_go_proto"), - "google/api/expr/v1beta1/source.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_go_proto"), - "google/api/expr/v1beta1/value.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_go_proto"), - "google/api/http.proto": label.New("go_googleapis", "google/api", "annotations_go_proto"), - "google/api/httpbody.proto": label.New("go_googleapis", "google/api", "httpbody_go_proto"), - "google/api/label.proto": label.New("go_googleapis", "google/api", "label_go_proto"), - "google/api/launch_stage.proto": label.New("go_googleapis", "google/api", "api_go_proto"), - "google/api/log.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/logging.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/metric.proto": label.New("go_googleapis", "google/api", "metric_go_proto"), - "google/api/monitored_resource.proto": label.New("go_googleapis", "google/api", "monitoredres_go_proto"), - "google/api/monitoring.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/quota.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/service.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/servicecontrol/v1/check_error.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicecontrol/v1/distribution.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicecontrol/v1/log_entry.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicecontrol/v1/metric_value.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicecontrol/v1/operation.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicecontrol/v1/quota_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicecontrol/v1/service_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"), - "google/api/servicemanagement/v1/resources.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_go_proto"), - "google/api/servicemanagement/v1/servicemanager.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_go_proto"), - "google/api/source_info.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/system_parameter.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/api/usage.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"), - "google/appengine/legacy/audit_data.proto": label.New("go_googleapis", "google/appengine/legacy", "legacy_go_proto"), - "google/appengine/logging/v1/request_log.proto": label.New("go_googleapis", "google/appengine/logging/v1", "logging_go_proto"), - "google/appengine/v1/app_yaml.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/appengine.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/application.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - 
"google/appengine/v1/audit_data.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/deploy.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/instance.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/location.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/operation.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/service.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/appengine/v1/version.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"), - "google/assistant/embedded/v1alpha1/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha1", "embedded_go_proto"), - "google/assistant/embedded/v1alpha2/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha2", "embedded_go_proto"), - "google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"), - "google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"), - "google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"), - "google/bigtable/admin/table/v1/bigtable_table_data.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"), - "google/bigtable/admin/table/v1/bigtable_table_service.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"), - "google/bigtable/admin/table/v1/bigtable_table_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"), - "google/bigtable/admin/v2/bigtable_instance_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"), - "google/bigtable/admin/v2/bigtable_table_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"), - "google/bigtable/admin/v2/common.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"), - "google/bigtable/admin/v2/instance.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"), - "google/bigtable/admin/v2/table.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"), - "google/bigtable/v1/bigtable_data.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"), - "google/bigtable/v1/bigtable_service.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"), - "google/bigtable/v1/bigtable_service_messages.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"), - "google/bigtable/v2/bigtable.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"), - "google/bigtable/v2/data.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"), - "google/bytestream/bytestream.proto": label.New("go_googleapis", "google/bytestream", "bytestream_go_proto"), - "google/cloud/asset/v1beta1/asset_service.proto": label.New("go_googleapis", "google/cloud/asset/v1beta1", "asset_go_proto"), - "google/cloud/asset/v1beta1/assets.proto": label.New("go_googleapis", "google/cloud/asset/v1beta1", "asset_go_proto"), - 
"google/cloud/audit/audit_log.proto": label.New("go_googleapis", "google/cloud/audit", "audit_go_proto"), - "google/cloud/automl/v1beta1/annotation_payload.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/classification.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/data_items.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/dataset.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/image.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/io.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/model.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/model_evaluation.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/operations.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/prediction_service.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/service.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/text.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/automl/v1beta1/translation.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_go_proto"), - "google/cloud/bigquery/datatransfer/v1/datatransfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"), - "google/cloud/bigquery/datatransfer/v1/transfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"), - "google/cloud/bigquery/logging/v1/audit_data.proto": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_go_proto"), - "google/cloud/bigquery/storage/v1beta1/avro.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_go_proto"), - "google/cloud/bigquery/storage/v1beta1/read_options.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_go_proto"), - "google/cloud/bigquery/storage/v1beta1/storage.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_go_proto"), - "google/cloud/bigquery/storage/v1beta1/table_reference.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_go_proto"), - "google/cloud/billing/v1/cloud_billing.proto": label.New("go_googleapis", "google/cloud/billing/v1", "billing_go_proto"), - "google/cloud/dataproc/v1/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"), - "google/cloud/dataproc/v1/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"), - "google/cloud/dataproc/v1/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"), - "google/cloud/dataproc/v1beta2/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"), - "google/cloud/dataproc/v1beta2/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", 
"dataproc_go_proto"), - "google/cloud/dataproc/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"), - "google/cloud/dataproc/v1beta2/shared.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"), - "google/cloud/dataproc/v1beta2/workflow_templates.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"), - "google/cloud/dialogflow/v2/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/audio_config.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/document.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/knowledge_base.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/dialogflow/v2beta1/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"), - "google/cloud/functions/v1beta2/functions.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"), - "google/cloud/functions/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"), - "google/cloud/iot/v1/device_manager.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"), - "google/cloud/iot/v1/resources.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"), - "google/cloud/kms/v1/resources.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"), - "google/cloud/kms/v1/service.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"), - "google/cloud/language/v1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1", 
"language_go_proto"), - "google/cloud/language/v1beta1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_go_proto"), - "google/cloud/language/v1beta2/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta2", "language_go_proto"), - "google/cloud/location/locations.proto": label.New("go_googleapis", "google/cloud/location", "location_go_proto"), - "google/cloud/ml/v1/job_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"), - "google/cloud/ml/v1/model_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"), - "google/cloud/ml/v1/operation_metadata.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"), - "google/cloud/ml/v1/prediction_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"), - "google/cloud/ml/v1/project_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"), - "google/cloud/oslogin/common/common.proto": label.New("go_googleapis", "google/cloud/oslogin/common", "common_go_proto"), - "google/cloud/oslogin/v1/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_go_proto"), - "google/cloud/oslogin/v1alpha/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_go_proto"), - "google/cloud/oslogin/v1beta/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_go_proto"), - "google/cloud/redis/v1/cloud_redis.proto": label.New("go_googleapis", "google/cloud/redis/v1", "redis_go_proto"), - "google/cloud/redis/v1beta1/cloud_redis.proto": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_go_proto"), - "google/cloud/resourcemanager/v2/folders.proto": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_go_proto"), - "google/cloud/runtimeconfig/v1beta1/resources.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"), - "google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"), - "google/cloud/speech/v1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1", "speech_go_proto"), - "google/cloud/speech/v1p1beta1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_go_proto"), - "google/cloud/support/common.proto": label.New("go_googleapis", "google/cloud/support", "common_go_proto"), - "google/cloud/support/v1alpha1/cloud_support.proto": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_go_proto"), - "google/cloud/tasks/v2beta2/cloudtasks.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"), - "google/cloud/tasks/v2beta2/queue.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"), - "google/cloud/tasks/v2beta2/target.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"), - "google/cloud/tasks/v2beta2/task.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"), - "google/cloud/tasks/v2beta3/cloudtasks.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_go_proto"), - "google/cloud/tasks/v2beta3/queue.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_go_proto"), - "google/cloud/tasks/v2beta3/target.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_go_proto"), - "google/cloud/tasks/v2beta3/task.proto": 
label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_go_proto"), - "google/cloud/texttospeech/v1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_go_proto"), - "google/cloud/texttospeech/v1beta1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_go_proto"), - "google/cloud/videointelligence/v1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_go_proto"), - "google/cloud/videointelligence/v1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_go_proto"), - "google/cloud/videointelligence/v1beta2/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_go_proto"), - "google/cloud/videointelligence/v1p1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_go_proto"), - "google/cloud/videointelligence/v1p2beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1p2beta1", "videointelligence_go_proto"), - "google/cloud/vision/v1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"), - "google/cloud/vision/v1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"), - "google/cloud/vision/v1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"), - "google/cloud/vision/v1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"), - "google/cloud/vision/v1p1beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"), - "google/cloud/vision/v1p1beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"), - "google/cloud/vision/v1p1beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"), - "google/cloud/vision/v1p1beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"), - "google/cloud/vision/v1p2beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"), - "google/cloud/vision/v1p2beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"), - "google/cloud/vision/v1p2beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"), - "google/cloud/vision/v1p2beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"), - "google/cloud/vision/v1p3beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google/cloud/vision/v1p3beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google/cloud/vision/v1p3beta1/product_search.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google/cloud/vision/v1p3beta1/product_search_service.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google/cloud/vision/v1p3beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google/cloud/vision/v1p3beta1/web_detection.proto": label.New("go_googleapis", 
"google/cloud/vision/v1p3beta1", "vision_go_proto"), - "google/cloud/websecurityscanner/v1alpha/crawled_url.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/cloud/websecurityscanner/v1alpha/finding.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/cloud/websecurityscanner/v1alpha/finding_addon.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/cloud/websecurityscanner/v1alpha/scan_config.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/cloud/websecurityscanner/v1alpha/scan_run.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"), - "google/container/v1/cluster_service.proto": label.New("go_googleapis", "google/container/v1", "container_go_proto"), - "google/container/v1alpha1/cluster_service.proto": label.New("go_googleapis", "google/container/v1alpha1", "container_go_proto"), - "google/container/v1beta1/cluster_service.proto": label.New("go_googleapis", "google/container/v1beta1", "container_go_proto"), - "google/datastore/admin/v1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1", "admin_go_proto"), - "google/datastore/admin/v1/index.proto": label.New("go_googleapis", "google/datastore/admin/v1", "admin_go_proto"), - "google/datastore/admin/v1beta1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1beta1", "admin_go_proto"), - "google/datastore/v1/datastore.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"), - "google/datastore/v1/entity.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"), - "google/datastore/v1/query.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"), - "google/datastore/v1beta3/datastore.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"), - "google/datastore/v1beta3/entity.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"), - "google/datastore/v1beta3/query.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"), - "google/devtools/build/v1/build_events.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"), - "google/devtools/build/v1/build_status.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"), - "google/devtools/build/v1/publish_build_event.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"), - "google/devtools/cloudbuild/v1/cloudbuild.proto": label.New("go_googleapis", "google/devtools/cloudbuild/v1", "cloudbuild_go_proto"), - "google/devtools/clouddebugger/v2/controller.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"), - "google/devtools/clouddebugger/v2/data.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"), - "google/devtools/clouddebugger/v2/debugger.proto": 
label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"), - "google/devtools/clouderrorreporting/v1beta1/common.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"), - "google/devtools/clouderrorreporting/v1beta1/error_group_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"), - "google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"), - "google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"), - "google/devtools/cloudprofiler/v2/profiler.proto": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_go_proto"), - "google/devtools/cloudtrace/v1/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_go_proto"), - "google/devtools/cloudtrace/v2/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"), - "google/devtools/cloudtrace/v2/tracing.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"), - "google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1alpha1/containeranalysis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1alpha1/image_basis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1alpha1/provenance.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1alpha1/source_context.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1beta1/attestation/attestation.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/attestation", "attestation_go_proto"), - "google/devtools/containeranalysis/v1beta1/build/build.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/build", "build_go_proto"), - "google/devtools/containeranalysis/v1beta1/common/common.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/common", "common_go_proto"), - "google/devtools/containeranalysis/v1beta1/containeranalysis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1", "containeranalysis_go_proto"), - "google/devtools/containeranalysis/v1beta1/deployment/deployment.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/deployment", "deployment_go_proto"), - "google/devtools/containeranalysis/v1beta1/discovery/discovery.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/discovery", "discovery_go_proto"), - "google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto": label.New("go_googleapis", 
"google/devtools/containeranalysis/v1beta1/grafeas", "grafeas_go_proto"), - "google/devtools/containeranalysis/v1beta1/image/image.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/image", "image_go_proto"), - "google/devtools/containeranalysis/v1beta1/package/package.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/package", "package_go_proto"), - "google/devtools/containeranalysis/v1beta1/provenance/provenance.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/provenance", "provenance_go_proto"), - "google/devtools/containeranalysis/v1beta1/source/source.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/source", "source_go_proto"), - "google/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/vulnerability", "vulnerability_go_proto"), - "google/devtools/remoteexecution/v1test/remote_execution.proto": label.New("go_googleapis", "google/devtools/remoteexecution/v1test", "remoteexecution_go_proto"), - "google/devtools/remoteworkers/v1test2/bots.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"), - "google/devtools/remoteworkers/v1test2/command.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"), - "google/devtools/remoteworkers/v1test2/tasks.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"), - "google/devtools/remoteworkers/v1test2/worker.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"), - "google/devtools/resultstore/v2/action.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/common.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/configuration.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/configured_target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/coverage.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/coverage_summary.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/file.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/file_set.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/invocation.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/resultstore_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/resultstore_file_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - "google/devtools/resultstore/v2/test_suite.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"), - 
"google/devtools/source/v1/source_context.proto": label.New("go_googleapis", "google/devtools/source/v1", "source_go_proto"), - "google/devtools/sourcerepo/v1/sourcerepo.proto": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_go_proto"), - "google/example/library/v1/library.proto": label.New("go_googleapis", "google/example/library/v1", "library_go_proto"), - "google/firestore/admin/v1beta1/firestore_admin.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"), - "google/firestore/admin/v1beta1/index.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"), - "google/firestore/admin/v1beta2/field.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_go_proto"), - "google/firestore/admin/v1beta2/firestore_admin.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_go_proto"), - "google/firestore/admin/v1beta2/index.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_go_proto"), - "google/firestore/admin/v1beta2/operation.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_go_proto"), - "google/firestore/v1beta1/common.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"), - "google/firestore/v1beta1/document.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"), - "google/firestore/v1beta1/firestore.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"), - "google/firestore/v1beta1/query.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"), - "google/firestore/v1beta1/write.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"), - "google/genomics/v1/annotations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/cigar.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/datasets.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/operations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/position.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/range.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/readalignment.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/readgroup.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/readgroupset.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/reads.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/references.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1/variants.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"), - "google/genomics/v1alpha2/pipelines.proto": label.New("go_googleapis", "google/genomics/v1alpha2", "genomics_go_proto"), - "google/home/graph/v1/device.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_go_proto"), - "google/home/graph/v1/homegraph.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_go_proto"), - "google/iam/admin/v1/iam.proto": label.New("go_googleapis", "google/iam/admin/v1", 
"admin_go_proto"), - "google/iam/credentials/v1/common.proto": label.New("go_googleapis", "google/iam/credentials/v1", "credentials_go_proto"), - "google/iam/credentials/v1/iamcredentials.proto": label.New("go_googleapis", "google/iam/credentials/v1", "credentials_go_proto"), - "google/iam/v1/iam_policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_go_proto"), - "google/iam/v1/logging/audit_data.proto": label.New("go_googleapis", "google/iam/v1/logging", "logging_go_proto"), - "google/iam/v1/policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_go_proto"), - "google/logging/type/http_request.proto": label.New("go_googleapis", "google/logging/type", "ltype_go_proto"), - "google/logging/type/log_severity.proto": label.New("go_googleapis", "google/logging/type", "ltype_go_proto"), - "google/logging/v2/log_entry.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"), - "google/logging/v2/logging.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"), - "google/logging/v2/logging_config.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"), - "google/logging/v2/logging_metrics.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"), - "google/longrunning/operations.proto": label.New("go_googleapis", "google/longrunning", "longrunning_go_proto"), - "google/monitoring/v3/alert.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/alert_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/common.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/dropped_labels.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/group.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/group_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/metric.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/metric_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/mutation_record.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/notification.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/notification_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/span_context.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/uptime.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/monitoring/v3/uptime_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"), - "google/privacy/dlp/v2/dlp.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_go_proto"), - "google/privacy/dlp/v2/storage.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_go_proto"), - "google/pubsub/v1/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1", "pubsub_go_proto"), - "google/pubsub/v1beta2/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1beta2", "pubsub_go_proto"), - "google/rpc/code.proto": label.New("go_googleapis", "google/rpc", "code_go_proto"), - 
"google/rpc/error_details.proto": label.New("go_googleapis", "google/rpc", "errdetails_go_proto"), - "google/rpc/status.proto": label.New("go_googleapis", "google/rpc", "status_go_proto"), - "google/spanner/admin/database/v1/spanner_database_admin.proto": label.New("go_googleapis", "google/spanner/admin/database/v1", "database_go_proto"), - "google/spanner/admin/instance/v1/spanner_instance_admin.proto": label.New("go_googleapis", "google/spanner/admin/instance/v1", "instance_go_proto"), - "google/spanner/v1/keys.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/spanner/v1/mutation.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/spanner/v1/query_plan.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/spanner/v1/result_set.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/spanner/v1/spanner.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/spanner/v1/transaction.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/spanner/v1/type.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"), - "google/storagetransfer/v1/transfer.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"), - "google/storagetransfer/v1/transfer_types.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"), - "google/streetview/publish/v1/resources.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"), - "google/streetview/publish/v1/rpcmessages.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"), - "google/streetview/publish/v1/streetview_publish.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"), - "google/type/color.proto": label.New("go_googleapis", "google/type", "color_go_proto"), - "google/type/date.proto": label.New("go_googleapis", "google/type", "date_go_proto"), - "google/type/dayofweek.proto": label.New("go_googleapis", "google/type", "dayofweek_go_proto"), - "google/type/latlng.proto": label.New("go_googleapis", "google/type", "latlng_go_proto"), - "google/type/money.proto": label.New("go_googleapis", "google/type", "money_go_proto"), - "google/type/postal_address.proto": label.New("go_googleapis", "google/type", "postaladdress_go_proto"), - "google/type/timeofday.proto": label.New("go_googleapis", "google/type", "timeofday_go_proto"), - "google/watcher/v1/watch.proto": label.New("go_googleapis", "google/watcher/v1", "watcher_go_proto"), -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/lang.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/lang.go deleted file mode 100644 index 27d50cae29..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/lang.go +++ /dev/null @@ -1,70 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package golang provides support for Go and Go proto rules. It generates -// go_library, go_binary, go_test, and go_proto_library rules. -// -// Configuration -// -// Go rules support the flags -build_tags, -go_prefix, and -external. -// They also support the directives # gazelle:build_tags, # gazelle:prefix, -// and # gazelle:importmap_prefix. See -// https://github.com/bazelbuild/bazel-gazelle/blob/master/README.rst#directives -// for information on these. -// -// Rule generation -// -// Currently, Gazelle generates rules for one Go package per directory. In -// general, we aim to support Go code which is compatible with "go build". If -// there are no buildable packages, Gazelle will delete existing rules with -// default names. If there are multiple packages, Gazelle will pick one that -// matches the directory name or will print an error if no such package is -// found. -// -// Gazelle names library and test rules somewhat oddly: go_default_library, and -// go_default_test. This is for historic reasons: before the importpath -// attribute was mandatory, import paths were inferred from label names. Even if -// we never support multiple packages in the future (we should), we should -// migrate away from this because it's surprising. Libraries should generally -// be named after their directories. -// -// Dependency resolution -// -// Go libraries are indexed by their importpath attribute. Gazelle attempts to -// resolve libraries by import path using the index, filtered using the -// vendoring algorithm. If an import doesn't match any known library, Gazelle -// guesses a name for it, locally (if the import path is under the current -// prefix), or in an external repository or vendor directory (depending -// on external mode). -// -// Gazelle has special cases for import paths associated with proto Well -// Known Types and Google APIs. rules_go declares canonical rules for these. -package golang - -import "github.com/bazelbuild/bazel-gazelle/language" - -const goName = "go" - -type goLang struct { - // goPkgDirs is a set of relative paths to directories containing buildable - // Go code, including in subdirectories. - goPkgRels map[string]bool -} - -func (_ *goLang) Name() string { return goName } - -func NewLanguage() language.Language { - return &goLang{goPkgRels: make(map[string]bool)} -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/package.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/package.go deleted file mode 100644 index 69317ac193..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/package.go +++ /dev/null @@ -1,488 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package golang - -import ( - "fmt" - "log" - "path" - "sort" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/language/proto" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// goPackage contains metadata for a set of .go and .proto files that can be -// used to generate Go rules. -type goPackage struct { - name, dir, rel string - library, binary, test goTarget - proto protoTarget - hasTestdata bool - importPath string -} - -// goTarget contains information used to generate an individual Go rule -// (library, binary, or test). -type goTarget struct { - sources, imports, copts, clinkopts platformStringsBuilder - cgo bool -} - -// protoTarget contains information used to generate a go_proto_library rule. -type protoTarget struct { - name string - sources platformStringsBuilder - imports platformStringsBuilder - hasServices bool -} - -// platformStringsBuilder is used to construct rule.PlatformStrings. Bazel -// has some requirements for deps list (a dependency cannot appear in more -// than one select expression; dependencies cannot be duplicated), so we need -// to build these carefully. -type platformStringsBuilder struct { - strs map[string]platformStringInfo -} - -// platformStringInfo contains information about a single string (source, -// import, or option). -type platformStringInfo struct { - set platformStringSet - oss map[string]bool - archs map[string]bool - platforms map[rule.Platform]bool -} - -type platformStringSet int - -const ( - genericSet platformStringSet = iota - osSet - archSet - platformSet -) - -// addFile adds the file described by "info" to a target in the package "p" if -// the file is buildable. -// -// "cgo" tells whether any ".go" file in the package contains cgo code. This -// affects whether C files are added to targets. -// -// An error is returned if a file is buildable but invalid (for example, a -// test .go file containing cgo code). Files that are not buildable will not -// be added to any target (for example, .txt files). -func (pkg *goPackage) addFile(c *config.Config, info fileInfo, cgo bool) error { - switch { - case info.ext == unknownExt || !cgo && (info.ext == cExt || info.ext == csExt): - return nil - case info.ext == protoExt: - if pcMode := getProtoMode(c); pcMode == proto.LegacyMode { - // Only add files in legacy mode. This is used to generate a filegroup - // that contains all protos. In order modes, we get the .proto files - // from information emitted by the proto language extension. - pkg.proto.addFile(c, info) - } - case info.isTest: - if info.isCgo { - return fmt.Errorf("%s: use of cgo in test not supported", info.path) - } - pkg.test.addFile(c, info) - default: - pkg.library.addFile(c, info) - } - - return nil -} - -// isCommand returns true if the package name is "main". -func (pkg *goPackage) isCommand() bool { - return pkg.name == "main" -} - -// isBuildable returns true if anything in the package is buildable. -// This is true if the package has Go code that satisfies build constraints -// on any platform or has proto files not in legacy mode. -func (pkg *goPackage) isBuildable(c *config.Config) bool { - return pkg.firstGoFile() != "" || !pkg.proto.sources.isEmpty() -} - -// firstGoFile returns the name of a .go file if the package contains at least -// one .go file, or "" otherwise. 
-func (pkg *goPackage) firstGoFile() string { - goSrcs := []platformStringsBuilder{ - pkg.library.sources, - pkg.binary.sources, - pkg.test.sources, - } - for _, sb := range goSrcs { - if sb.strs != nil { - for s := range sb.strs { - if strings.HasSuffix(s, ".go") { - return s - } - } - } - } - return "" -} - -func (pkg *goPackage) haveCgo() bool { - return pkg.library.cgo || pkg.binary.cgo || pkg.test.cgo -} - -func (pkg *goPackage) inferImportPath(c *config.Config) error { - if pkg.importPath != "" { - log.Panic("importPath already set") - } - gc := getGoConfig(c) - if !gc.prefixSet { - return fmt.Errorf("%s: go prefix is not set, so importpath can't be determined for rules. Set a prefix with a '# gazelle:prefix' comment or with -go_prefix on the command line", pkg.dir) - } - pkg.importPath = inferImportPath(gc, pkg.rel) - - if pkg.rel == gc.prefixRel { - pkg.importPath = gc.prefix - } else { - fromPrefixRel := strings.TrimPrefix(pkg.rel, gc.prefixRel+"/") - pkg.importPath = path.Join(gc.prefix, fromPrefixRel) - } - return nil -} - -func inferImportPath(gc *goConfig, rel string) string { - if rel == gc.prefixRel { - return gc.prefix - } else { - fromPrefixRel := strings.TrimPrefix(rel, gc.prefixRel+"/") - return path.Join(gc.prefix, fromPrefixRel) - } -} - -func goProtoPackageName(pkg proto.Package) string { - if value, ok := pkg.Options["go_package"]; ok { - if strings.LastIndexByte(value, '/') == -1 { - return value - } else { - if i := strings.LastIndexByte(value, ';'); i != -1 { - return value[i+1:] - } else { - return path.Base(value) - } - } - } - return strings.Replace(pkg.Name, ".", "_", -1) -} - -func goProtoImportPath(gc *goConfig, pkg proto.Package, rel string) string { - if value, ok := pkg.Options["go_package"]; ok { - if strings.LastIndexByte(value, '/') == -1 { - return inferImportPath(gc, rel) - } else if i := strings.LastIndexByte(value, ';'); i != -1 { - return value[:i] - } else { - return value - } - } - return inferImportPath(gc, rel) -} - -func (t *goTarget) addFile(c *config.Config, info fileInfo) { - t.cgo = t.cgo || info.isCgo - add := getPlatformStringsAddFunction(c, info, nil) - add(&t.sources, info.name) - add(&t.imports, info.imports...) - for _, copts := range info.copts { - optAdd := add - if len(copts.tags) > 0 { - optAdd = getPlatformStringsAddFunction(c, info, copts.tags) - } - optAdd(&t.copts, copts.opts) - } - for _, clinkopts := range info.clinkopts { - optAdd := add - if len(clinkopts.tags) > 0 { - optAdd = getPlatformStringsAddFunction(c, info, clinkopts.tags) - } - optAdd(&t.clinkopts, clinkopts.opts) - } -} - -func protoTargetFromProtoPackage(name string, pkg proto.Package) protoTarget { - target := protoTarget{name: name} - for f := range pkg.Files { - target.sources.addGenericString(f) - } - for i := range pkg.Imports { - target.imports.addGenericString(i) - } - target.hasServices = pkg.HasServices - return target -} - -func (t *protoTarget) addFile(c *config.Config, info fileInfo) { - t.sources.addGenericString(info.name) - for _, imp := range info.imports { - t.imports.addGenericString(imp) - } - t.hasServices = t.hasServices || info.hasServices -} - -// getPlatformStringsAddFunction returns a function used to add strings to -// a *platformStringsBuilder under the same set of constraints. This is a -// performance optimization to avoid evaluating constraints repeatedly. 
-func getPlatformStringsAddFunction(c *config.Config, info fileInfo, cgoTags tagLine) func(sb *platformStringsBuilder, ss ...string) { - isOSSpecific, isArchSpecific := isOSArchSpecific(info, cgoTags) - - switch { - case !isOSSpecific && !isArchSpecific: - if checkConstraints(c, "", "", info.goos, info.goarch, info.tags, cgoTags) { - return func(sb *platformStringsBuilder, ss ...string) { - for _, s := range ss { - sb.addGenericString(s) - } - } - } - - case isOSSpecific && !isArchSpecific: - var osMatch []string - for _, os := range rule.KnownOSs { - if checkConstraints(c, os, "", info.goos, info.goarch, info.tags, cgoTags) { - osMatch = append(osMatch, os) - } - } - if len(osMatch) > 0 { - return func(sb *platformStringsBuilder, ss ...string) { - for _, s := range ss { - sb.addOSString(s, osMatch) - } - } - } - - case !isOSSpecific && isArchSpecific: - var archMatch []string - for _, arch := range rule.KnownArchs { - if checkConstraints(c, "", arch, info.goos, info.goarch, info.tags, cgoTags) { - archMatch = append(archMatch, arch) - } - } - if len(archMatch) > 0 { - return func(sb *platformStringsBuilder, ss ...string) { - for _, s := range ss { - sb.addArchString(s, archMatch) - } - } - } - - default: - var platformMatch []rule.Platform - for _, platform := range rule.KnownPlatforms { - if checkConstraints(c, platform.OS, platform.Arch, info.goos, info.goarch, info.tags, cgoTags) { - platformMatch = append(platformMatch, platform) - } - } - if len(platformMatch) > 0 { - return func(sb *platformStringsBuilder, ss ...string) { - for _, s := range ss { - sb.addPlatformString(s, platformMatch) - } - } - } - } - - return func(_ *platformStringsBuilder, _ ...string) {} -} - -func (sb *platformStringsBuilder) isEmpty() bool { - return sb.strs == nil -} - -func (sb *platformStringsBuilder) hasGo() bool { - for s := range sb.strs { - if strings.HasSuffix(s, ".go") { - return true - } - } - return false -} - -func (sb *platformStringsBuilder) addGenericString(s string) { - if sb.strs == nil { - sb.strs = make(map[string]platformStringInfo) - } - sb.strs[s] = platformStringInfo{set: genericSet} -} - -func (sb *platformStringsBuilder) addOSString(s string, oss []string) { - if sb.strs == nil { - sb.strs = make(map[string]platformStringInfo) - } - si, ok := sb.strs[s] - if !ok { - si.set = osSet - si.oss = make(map[string]bool) - } - switch si.set { - case genericSet: - return - case osSet: - for _, os := range oss { - si.oss[os] = true - } - default: - si.convertToPlatforms() - for _, os := range oss { - for _, arch := range rule.KnownOSArchs[os] { - si.platforms[rule.Platform{OS: os, Arch: arch}] = true - } - } - } - sb.strs[s] = si -} - -func (sb *platformStringsBuilder) addArchString(s string, archs []string) { - if sb.strs == nil { - sb.strs = make(map[string]platformStringInfo) - } - si, ok := sb.strs[s] - if !ok { - si.set = archSet - si.archs = make(map[string]bool) - } - switch si.set { - case genericSet: - return - case archSet: - for _, arch := range archs { - si.archs[arch] = true - } - default: - si.convertToPlatforms() - for _, arch := range archs { - for _, os := range rule.KnownArchOSs[arch] { - si.platforms[rule.Platform{OS: os, Arch: arch}] = true - } - } - } - sb.strs[s] = si -} - -func (sb *platformStringsBuilder) addPlatformString(s string, platforms []rule.Platform) { - if sb.strs == nil { - sb.strs = make(map[string]platformStringInfo) - } - si, ok := sb.strs[s] - if !ok { - si.set = platformSet - si.platforms = make(map[rule.Platform]bool) - } - switch si.set { - case 
genericSet: - return - default: - si.convertToPlatforms() - for _, p := range platforms { - si.platforms[p] = true - } - } - sb.strs[s] = si -} - -func (sb *platformStringsBuilder) build() rule.PlatformStrings { - var ps rule.PlatformStrings - for s, si := range sb.strs { - switch si.set { - case genericSet: - ps.Generic = append(ps.Generic, s) - case osSet: - if ps.OS == nil { - ps.OS = make(map[string][]string) - } - for os := range si.oss { - ps.OS[os] = append(ps.OS[os], s) - } - case archSet: - if ps.Arch == nil { - ps.Arch = make(map[string][]string) - } - for arch := range si.archs { - ps.Arch[arch] = append(ps.Arch[arch], s) - } - case platformSet: - if ps.Platform == nil { - ps.Platform = make(map[rule.Platform][]string) - } - for p := range si.platforms { - ps.Platform[p] = append(ps.Platform[p], s) - } - } - } - sort.Strings(ps.Generic) - if ps.OS != nil { - for _, ss := range ps.OS { - sort.Strings(ss) - } - } - if ps.Arch != nil { - for _, ss := range ps.Arch { - sort.Strings(ss) - } - } - if ps.Platform != nil { - for _, ss := range ps.Platform { - sort.Strings(ss) - } - } - return ps -} - -func (sb *platformStringsBuilder) buildFlat() []string { - strs := make([]string, 0, len(sb.strs)) - for s := range sb.strs { - strs = append(strs, s) - } - sort.Strings(strs) - return strs -} - -func (si *platformStringInfo) convertToPlatforms() { - switch si.set { - case genericSet: - log.Panic("cannot convert generic string to platforms") - case platformSet: - return - case osSet: - si.set = platformSet - si.platforms = make(map[rule.Platform]bool) - for os := range si.oss { - for _, arch := range rule.KnownOSArchs[os] { - si.platforms[rule.Platform{OS: os, Arch: arch}] = true - } - } - si.oss = nil - case archSet: - si.set = platformSet - si.platforms = make(map[rule.Platform]bool) - for arch := range si.archs { - for _, os := range rule.KnownArchOSs[arch] { - si.platforms[rule.Platform{OS: os, Arch: arch}] = true - } - } - si.archs = nil - } -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/resolve.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/resolve.go deleted file mode 100644 index 36e7442afb..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/resolve.go +++ /dev/null @@ -1,335 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package golang - -import ( - "errors" - "fmt" - "go/build" - "log" - "path" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/label" - "github.com/bazelbuild/bazel-gazelle/pathtools" - "github.com/bazelbuild/bazel-gazelle/repo" - "github.com/bazelbuild/bazel-gazelle/resolve" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func (_ *goLang) Imports(_ *config.Config, r *rule.Rule, f *rule.File) []resolve.ImportSpec { - if !isGoLibrary(r.Kind()) { - return nil - } - if importPath := r.AttrString("importpath"); importPath == "" { - return []resolve.ImportSpec{} - } else { - return []resolve.ImportSpec{{goName, importPath}} - } -} - -func (_ *goLang) Embeds(r *rule.Rule, from label.Label) []label.Label { - embedStrings := r.AttrStrings("embed") - if isGoProtoLibrary(r.Kind()) { - embedStrings = append(embedStrings, r.AttrString("proto")) - } - embedLabels := make([]label.Label, 0, len(embedStrings)) - for _, s := range embedStrings { - l, err := label.Parse(s) - if err != nil { - continue - } - l = l.Abs(from.Repo, from.Pkg) - embedLabels = append(embedLabels, l) - } - return embedLabels -} - -func (gl *goLang) Resolve(c *config.Config, ix *resolve.RuleIndex, rc *repo.RemoteCache, r *rule.Rule, importsRaw interface{}, from label.Label) { - if importsRaw == nil { - // may not be set in tests. - return - } - imports := importsRaw.(rule.PlatformStrings) - r.DelAttr("deps") - resolve := resolveGo - if r.Kind() == "go_proto_library" { - resolve = resolveProto - } - deps, errs := imports.Map(func(imp string) (string, error) { - l, err := resolve(c, ix, rc, r, imp, from) - if err == skipImportError { - return "", nil - } else if err != nil { - return "", err - } - for _, embed := range gl.Embeds(r, from) { - if embed.Equal(l) { - return "", nil - } - } - l = l.Rel(from.Repo, from.Pkg) - return l.String(), nil - }) - for _, err := range errs { - log.Print(err) - } - if !deps.IsEmpty() { - if r.Kind() == "go_proto_library" { - // protos may import the same library multiple times by different names, - // so we need to de-duplicate them. Protos are not platform-specific, - // so it's safe to just flatten them. - r.SetAttr("deps", deps.Flat()) - } else { - r.SetAttr("deps", deps) - } - } -} - -var ( - skipImportError = errors.New("std or self import") - notFoundError = errors.New("rule not found") -) - -func resolveGo(c *config.Config, ix *resolve.RuleIndex, rc *repo.RemoteCache, r *rule.Rule, imp string, from label.Label) (label.Label, error) { - gc := getGoConfig(c) - pcMode := getProtoMode(c) - if build.IsLocalImport(imp) { - cleanRel := path.Clean(path.Join(from.Pkg, imp)) - if build.IsLocalImport(cleanRel) { - return label.NoLabel, fmt.Errorf("relative import path %q from %q points outside of repository", imp, from.Pkg) - } - imp = path.Join(gc.prefix, cleanRel) - } - - if isStandard(imp) { - return label.NoLabel, skipImportError - } - - if l, ok := resolve.FindRuleWithOverride(c, resolve.ImportSpec{Lang: "go", Imp: imp}, "go"); ok { - return l, nil - } - - if pcMode.ShouldUseKnownImports() { - // These are commonly used libraries that depend on Well Known Types. - // They depend on the generated versions of these protos to avoid conflicts. - // However, since protoc-gen-go depends on these libraries, we generate - // its rules in disable_global mode (to avoid cyclic dependency), so the - // "go_default_library" versions of these libraries depend on the - // pre-generated versions of the proto libraries. 
- switch imp { - case "github.com/golang/protobuf/jsonpb": - return label.New("com_github_golang_protobuf", "jsonpb", "go_default_library_gen"), nil - case "github.com/golang/protobuf/descriptor": - return label.New("com_github_golang_protobuf", "descriptor", "go_default_library_gen"), nil - case "github.com/golang/protobuf/ptypes": - return label.New("com_github_golang_protobuf", "ptypes", "go_default_library_gen"), nil - case "github.com/golang/protobuf/protoc-gen-go/generator": - return label.New("com_github_golang_protobuf", "protoc-gen-go/generator", "go_default_library_gen"), nil - case "google.golang.org/grpc": - return label.New("org_golang_google_grpc", "", "go_default_library"), nil - } - if l, ok := knownGoProtoImports[imp]; ok { - return l, nil - } - } - - if l, err := resolveWithIndexGo(ix, imp, from); err == nil || err == skipImportError { - return l, err - } else if err != notFoundError { - return label.NoLabel, err - } - - // Special cases for rules_go and bazel_gazelle. - // These have names that don't following conventions and they're - // typeically declared with http_archive, not go_repository, so Gazelle - // won't recognize them. - if pathtools.HasPrefix(imp, "github.com/bazelbuild/rules_go") { - pkg := pathtools.TrimPrefix(imp, "github.com/bazelbuild/rules_go") - return label.New("io_bazel_rules_go", pkg, "go_default_library"), nil - } else if pathtools.HasPrefix(imp, "github.com/bazelbuild/bazel-gazelle") { - pkg := pathtools.TrimPrefix(imp, "github.com/bazelbuild/bazel-gazelle") - return label.New("bazel_gazelle", pkg, "go_default_library"), nil - } - - if pathtools.HasPrefix(imp, gc.prefix) { - pkg := path.Join(gc.prefixRel, pathtools.TrimPrefix(imp, gc.prefix)) - return label.New("", pkg, defaultLibName), nil - } - - if gc.depMode == externalMode { - return resolveExternal(rc, imp) - } else { - return resolveVendored(rc, imp) - } -} - -// isStandard returns whether a package is in the standard library. -func isStandard(imp string) bool { - return stdPackages[imp] -} - -func resolveWithIndexGo(ix *resolve.RuleIndex, imp string, from label.Label) (label.Label, error) { - matches := ix.FindRulesByImport(resolve.ImportSpec{Lang: "go", Imp: imp}, "go") - var bestMatch resolve.FindResult - var bestMatchIsVendored bool - var bestMatchVendorRoot string - var matchError error - - for _, m := range matches { - // Apply vendoring logic for Go libraries. A library in a vendor directory - // is only visible in the parent tree. Vendored libraries supercede - // non-vendored libraries, and libraries closer to from.Pkg supercede - // those further up the tree. 
- isVendored := false - vendorRoot := "" - parts := strings.Split(m.Label.Pkg, "/") - for i := len(parts) - 1; i >= 0; i-- { - if parts[i] == "vendor" { - isVendored = true - vendorRoot = strings.Join(parts[:i], "/") - break - } - } - if isVendored { - } - if isVendored && !label.New(m.Label.Repo, vendorRoot, "").Contains(from) { - // vendor directory not visible - continue - } - if bestMatch.Label.Equal(label.NoLabel) || isVendored && (!bestMatchIsVendored || len(vendorRoot) > len(bestMatchVendorRoot)) { - // Current match is better - bestMatch = m - bestMatchIsVendored = isVendored - bestMatchVendorRoot = vendorRoot - matchError = nil - } else if (!isVendored && bestMatchIsVendored) || (isVendored && len(vendorRoot) < len(bestMatchVendorRoot)) { - // Current match is worse - } else { - // Match is ambiguous - matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.Label, m.Label, imp, from) - } - } - if matchError != nil { - return label.NoLabel, matchError - } - if bestMatch.Label.Equal(label.NoLabel) { - return label.NoLabel, notFoundError - } - if bestMatch.IsSelfImport(from) { - return label.NoLabel, skipImportError - } - return bestMatch.Label, nil -} - -func resolveExternal(rc *repo.RemoteCache, imp string) (label.Label, error) { - prefix, repo, err := rc.Root(imp) - if err != nil { - return label.NoLabel, err - } - - var pkg string - if imp != prefix { - pkg = pathtools.TrimPrefix(imp, prefix) - } - - return label.New(repo, pkg, defaultLibName), nil -} - -func resolveVendored(rc *repo.RemoteCache, imp string) (label.Label, error) { - return label.New("", path.Join("vendor", imp), defaultLibName), nil -} - -func resolveProto(c *config.Config, ix *resolve.RuleIndex, rc *repo.RemoteCache, r *rule.Rule, imp string, from label.Label) (label.Label, error) { - pcMode := getProtoMode(c) - - if wellKnownProtos[imp] { - return label.NoLabel, skipImportError - } - - if l, ok := resolve.FindRuleWithOverride(c, resolve.ImportSpec{Lang: "proto", Imp: imp}, "go"); ok { - return l, nil - } - - if l, ok := knownProtoImports[imp]; ok && pcMode.ShouldUseKnownImports() { - if l.Equal(from) { - return label.NoLabel, skipImportError - } else { - return l, nil - } - } - - if l, err := resolveWithIndexProto(ix, imp, from); err == nil || err == skipImportError { - return l, err - } else if err != notFoundError { - return label.NoLabel, err - } - - // As a fallback, guess the label based on the proto file name. We assume - // all proto files in a directory belong to the same package, and the - // package name matches the directory base name. We also assume that protos - // in the vendor directory must refer to something else in vendor. - rel := path.Dir(imp) - if rel == "." { - rel = "" - } - if from.Pkg == "vendor" || strings.HasPrefix(from.Pkg, "vendor/") { - rel = path.Join("vendor", rel) - } - return label.New("", rel, defaultLibName), nil -} - -// wellKnownProtos is the set of proto sets for which we don't need to add -// an explicit dependency in go_proto_library. 
-// TODO(jayconrod): generate from -// @io_bazel_rules_go//proto/wkt:WELL_KNOWN_TYPE_PACKAGES -var wellKnownProtos = map[string]bool{ - "google/protobuf/any.proto": true, - "google/protobuf/api.proto": true, - "google/protobuf/compiler_plugin.proto": true, - "google/protobuf/descriptor.proto": true, - "google/protobuf/duration.proto": true, - "google/protobuf/empty.proto": true, - "google/protobuf/field_mask.proto": true, - "google/protobuf/source_context.proto": true, - "google/protobuf/struct.proto": true, - "google/protobuf/timestamp.proto": true, - "google/protobuf/type.proto": true, - "google/protobuf/wrappers.proto": true, -} - -func resolveWithIndexProto(ix *resolve.RuleIndex, imp string, from label.Label) (label.Label, error) { - matches := ix.FindRulesByImport(resolve.ImportSpec{Lang: "proto", Imp: imp}, "go") - if len(matches) == 0 { - return label.NoLabel, notFoundError - } - if len(matches) > 1 { - return label.NoLabel, fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", matches[0].Label, matches[1].Label, imp, from) - } - if matches[0].IsSelfImport(from) { - return label.NoLabel, skipImportError - } - return matches[0].Label, nil -} - -func isGoLibrary(kind string) bool { - return kind == "go_library" || isGoProtoLibrary(kind) -} - -func isGoProtoLibrary(kind string) bool { - return kind == "go_proto_library" || kind == "go_grpc_library" -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/std_package_list.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/std_package_list.go deleted file mode 100644 index 34a6d6b22a..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/go/std_package_list.go +++ /dev/null @@ -1,284 +0,0 @@ - -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -// Generated by gen_std_package_list.go -// DO NOT EDIT - -package golang - -var stdPackages = map[string]bool{ - "archive/tar": true, - "archive/zip": true, - "bufio": true, - "bytes": true, - "cmd/addr2line": true, - "cmd/api": true, - "cmd/asm": true, - "cmd/asm/internal/arch": true, - "cmd/asm/internal/asm": true, - "cmd/asm/internal/flags": true, - "cmd/asm/internal/lex": true, - "cmd/buildid": true, - "cmd/cgo": true, - "cmd/compile": true, - "cmd/compile/internal/amd64": true, - "cmd/compile/internal/arm": true, - "cmd/compile/internal/arm64": true, - "cmd/compile/internal/gc": true, - "cmd/compile/internal/mips": true, - "cmd/compile/internal/mips64": true, - "cmd/compile/internal/ppc64": true, - "cmd/compile/internal/s390x": true, - "cmd/compile/internal/ssa": true, - "cmd/compile/internal/syntax": true, - "cmd/compile/internal/test": true, - "cmd/compile/internal/types": true, - "cmd/compile/internal/x86": true, - "cmd/cover": true, - "cmd/dist": true, - "cmd/doc": true, - "cmd/fix": true, - "cmd/go": true, - "cmd/go/internal/base": true, - "cmd/go/internal/bug": true, - "cmd/go/internal/cache": true, - "cmd/go/internal/cfg": true, - "cmd/go/internal/clean": true, - "cmd/go/internal/cmdflag": true, - "cmd/go/internal/doc": true, - "cmd/go/internal/envcmd": true, - "cmd/go/internal/fix": true, - "cmd/go/internal/fmtcmd": true, - "cmd/go/internal/generate": true, - "cmd/go/internal/get": true, - "cmd/go/internal/help": true, - "cmd/go/internal/list": true, - "cmd/go/internal/load": true, - "cmd/go/internal/run": true, - "cmd/go/internal/str": true, - "cmd/go/internal/test": true, - "cmd/go/internal/tool": true, - "cmd/go/internal/version": true, - "cmd/go/internal/vet": true, - "cmd/go/internal/web": true, - "cmd/go/internal/work": true, - "cmd/gofmt": true, - "cmd/internal/bio": true, - "cmd/internal/browser": true, - "cmd/internal/buildid": true, - "cmd/internal/dwarf": true, - "cmd/internal/edit": true, - "cmd/internal/gcprog": true, - "cmd/internal/goobj": true, - "cmd/internal/obj": true, - "cmd/internal/obj/arm": true, - "cmd/internal/obj/arm64": true, - "cmd/internal/obj/mips": true, - "cmd/internal/obj/ppc64": true, - "cmd/internal/obj/s390x": true, - "cmd/internal/obj/x86": true, - "cmd/internal/objabi": true, - "cmd/internal/objfile": true, - "cmd/internal/src": true, - "cmd/internal/sys": true, - "cmd/internal/test2json": true, - "cmd/link": true, - "cmd/link/internal/amd64": true, - "cmd/link/internal/arm": true, - "cmd/link/internal/arm64": true, - "cmd/link/internal/ld": true, - "cmd/link/internal/loadelf": true, - "cmd/link/internal/loadmacho": true, - "cmd/link/internal/loadpe": true, - "cmd/link/internal/mips": true, - "cmd/link/internal/mips64": true, - "cmd/link/internal/objfile": true, - "cmd/link/internal/ppc64": true, - "cmd/link/internal/s390x": true, - "cmd/link/internal/sym": true, - "cmd/link/internal/x86": true, - "cmd/nm": true, - "cmd/objdump": true, - "cmd/pack": true, - "cmd/pprof": true, - "cmd/test2json": true, - "cmd/trace": true, - "cmd/vet": true, - "cmd/vet/internal/cfg": true, - "cmd/vet/internal/whitelist": true, - "compress/bzip2": true, - "compress/flate": true, - "compress/gzip": true, - "compress/lzw": true, - "compress/zlib": true, - "container/heap": true, - "container/list": true, - "container/ring": true, - "context": true, - "crypto": true, - "crypto/aes": true, - "crypto/cipher": true, - "crypto/des": true, - "crypto/dsa": true, - "crypto/ecdsa": true, - "crypto/elliptic": true, - "crypto/hmac": true, - 
"crypto/internal/cipherhw": true, - "crypto/md5": true, - "crypto/rand": true, - "crypto/rc4": true, - "crypto/rsa": true, - "crypto/sha1": true, - "crypto/sha256": true, - "crypto/sha512": true, - "crypto/subtle": true, - "crypto/tls": true, - "crypto/x509": true, - "crypto/x509/pkix": true, - "database/sql": true, - "database/sql/driver": true, - "debug/dwarf": true, - "debug/elf": true, - "debug/gosym": true, - "debug/macho": true, - "debug/pe": true, - "debug/plan9obj": true, - "encoding": true, - "encoding/ascii85": true, - "encoding/asn1": true, - "encoding/base32": true, - "encoding/base64": true, - "encoding/binary": true, - "encoding/csv": true, - "encoding/gob": true, - "encoding/hex": true, - "encoding/json": true, - "encoding/pem": true, - "encoding/xml": true, - "errors": true, - "expvar": true, - "flag": true, - "fmt": true, - "go/ast": true, - "go/build": true, - "go/constant": true, - "go/doc": true, - "go/format": true, - "go/importer": true, - "go/internal/gccgoimporter": true, - "go/internal/gcimporter": true, - "go/internal/srcimporter": true, - "go/parser": true, - "go/printer": true, - "go/scanner": true, - "go/token": true, - "go/types": true, - "hash": true, - "hash/adler32": true, - "hash/crc32": true, - "hash/crc64": true, - "hash/fnv": true, - "html": true, - "html/template": true, - "image": true, - "image/color": true, - "image/color/palette": true, - "image/draw": true, - "image/gif": true, - "image/internal/imageutil": true, - "image/jpeg": true, - "image/png": true, - "index/suffixarray": true, - "internal/cpu": true, - "internal/nettrace": true, - "internal/poll": true, - "internal/race": true, - "internal/singleflight": true, - "internal/syscall/windows": true, - "internal/syscall/windows/registry": true, - "internal/syscall/windows/sysdll": true, - "internal/testenv": true, - "internal/testlog": true, - "internal/trace": true, - "io": true, - "io/ioutil": true, - "log": true, - "log/syslog": true, - "math": true, - "math/big": true, - "math/bits": true, - "math/cmplx": true, - "math/rand": true, - "mime": true, - "mime/multipart": true, - "mime/quotedprintable": true, - "net": true, - "net/http": true, - "net/http/cgi": true, - "net/http/cookiejar": true, - "net/http/fcgi": true, - "net/http/httptest": true, - "net/http/httptrace": true, - "net/http/httputil": true, - "net/http/internal": true, - "net/http/pprof": true, - "net/internal/socktest": true, - "net/mail": true, - "net/rpc": true, - "net/rpc/jsonrpc": true, - "net/smtp": true, - "net/textproto": true, - "net/url": true, - "os": true, - "os/exec": true, - "os/signal": true, - "os/signal/internal/pty": true, - "os/user": true, - "path": true, - "path/filepath": true, - "plugin": true, - "reflect": true, - "regexp": true, - "regexp/syntax": true, - "runtime": true, - "runtime/cgo": true, - "runtime/debug": true, - "runtime/internal/atomic": true, - "runtime/internal/sys": true, - "runtime/pprof": true, - "runtime/pprof/internal/profile": true, - "runtime/race": true, - "runtime/trace": true, - "sort": true, - "strconv": true, - "strings": true, - "sync": true, - "sync/atomic": true, - "syscall": true, - "testing": true, - "testing/internal/testdeps": true, - "testing/iotest": true, - "testing/quick": true, - "text/scanner": true, - "text/tabwriter": true, - "text/template": true, - "text/template/parse": true, - "time": true, - "unicode": true, - "unicode/utf16": true, - "unicode/utf8": true, - "unsafe": true, -} diff --git 
a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/lang.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/lang.go deleted file mode 100644 index 5081dc1c55..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/lang.go +++ /dev/null @@ -1,151 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package language provides an interface for language extensions in Gazelle. -// Support for a new language can be added by defining a package with a -// function named "New" that returns a value assignable to this interface. -// -// TODO(jayconrod): document how to incorporate languages into a gazelle -// binary that can be run by Bazel. -package language - -import ( - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/resolve" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// Language describes an extension for Gazelle that provides support for -// a set of Bazel rules. -// -// Languages are used primarily by the fix and update commands. The order -// in which languages are used matters, since languages may depend on -// one another. For example, go depends on proto, since go_proto_libraries -// are generated from metadata stored in proto_libraries. -// -// A single instance of Language is created for each fix / update run. Some -// state may be stored in this instance, but stateless behavior is encouraged, -// especially since some operations may be concurrent in the future. -// -// Tasks languages are used for -// -// * Configuration (embedded interface config.Configurer). Languages may -// define command line flags and alter the configuration in a directory -// based on directives in build files. -// -// * Fixing deprecated usage of rules in build files. -// -// * Generating rules from source files in a directory. -// -// * Resolving library imports (embedded interface resolve.Resolver). For -// example, import strings like "github.com/foo/bar" in Go can be resolved -// into Bazel labels like "@com_github_foo_bar//:go_default_library". -// -// Tasks languages support -// -// * Generating load statements: languages list files and symbols that may -// be loaded. -// -// * Merging generated rules into existing rules: languages provide metadata -// that helps with rule matching, merging, and deletion. -type Language interface { - // TODO(jayconrod): is embedding Configurer strictly necessary? - config.Configurer - resolve.Resolver - - // Kinds returns a map of maps rule names (kinds) and information on how to - // match and merge attributes that may be found in rules of those kinds. All - // kinds of rules generated for this language may be found here. - Kinds() map[string]rule.KindInfo - - // Loads returns .bzl files and symbols they define. Every rule generated by - // GenerateRules, now or in the past, should be loadable from one of these - // files. 
- Loads() []rule.LoadInfo - - // GenerateRules extracts build metadata from source files in a directory. - // GenerateRules is called in each directory where an update is requested - // in depth-first post-order. - // - // args contains the arguments for GenerateRules. This is passed as a - // struct to avoid breaking implementations in the future when new - // fields are added. - // - // A GenerateResult struct is returned. Optional fields may be added to this - // type in the future. - // - // Any non-fatal errors this function encounters should be logged using - // log.Print. - GenerateRules(args GenerateArgs) GenerateResult - - // Fix repairs deprecated usage of language-specific rules in f. This is - // called before the file is indexed. Unless c.ShouldFix is true, fixes - // that delete or rename rules should not be performed. - Fix(c *config.Config, f *rule.File) -} - -// GenerateArgs contains arguments for language.GenerateRules. Arguments are -// passed in a struct value so that new fields may be added in the future -// without breaking existing implementations. -type GenerateArgs struct { - // Config is the configuration for the directory where rules are being - // generated. - Config *config.Config - - // Dir is the canonical absolute path to the directory. - Dir string - - // Rel is the slash-separated path to the directory, relative to the - // repository root ("" for the root directory itself). This may be used - // as the package name in labels. - Rel string - - // File is the build file for the directory. File is nil if there is - // no existing build file. - File *rule.File - - // Subdirs is a list of subdirectories in the directory, including - // symbolic links to directories that Gazelle will follow. - // RegularFiles is a list of regular files including other symbolic - // links. - // GeneratedFiles is a list of generated files in the directory - // (usually these are mentioned as "out" or "outs" attributes in rules). - Subdirs, RegularFiles, GenFiles []string - - // OtherEmpty is a list of empty rules generated by other languages. - // OtherGen is a list of generated rules generated by other languages. - OtherEmpty, OtherGen []*rule.Rule -} - -// GenerateResult contains return values for language.GenerateRules. -// Results are returned through a struct value so that new (optional) -// fields may be added without breaking existing implementations. -type GenerateResult struct { - // Gen is a list of rules generated from files found in the directory - // GenerateRules was asked to process. These will be merged with existing - // rules or added to the build file. - Gen []*rule.Rule - - // Empty is a list of rules that cannot be built with the files found in the - // directory GenerateRules was asked to process. These will be merged with - // existing rules. If ther merged rules are empty, they will be deleted. - Empty []*rule.Rule - - // Imports contains information about the imported libraries for each - // rule in Gen. Gen and Imports must have the same length, since they - // correspond. These values are passed to Resolve after merge. The type - // is opaque since different languages may use different representations. 
- Imports []interface{} -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/BUILD.bazel deleted file mode 100644 index f8b71790d4..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/BUILD.bazel +++ /dev/null @@ -1,28 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "config.go", - "constants.go", - "fileinfo.go", - "fix.go", - "generate.go", - "kinds.go", - "known_imports.go", - "lang.go", - "package.go", - "resolve.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto", - importpath = "github.com/bazelbuild/bazel-gazelle/language/proto", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/config:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/language:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/repo:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/resolve:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/config.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/config.go deleted file mode 100644 index 9a55782899..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/config.go +++ /dev/null @@ -1,256 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package proto - -import ( - "flag" - "fmt" - "log" - "path" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// ProtoConfig contains configuration values related to protos. -// -// This type is public because other languages need to generate rules based -// on protos, so this configuration may be relevant to them. -type ProtoConfig struct { - // Mode determines how rules are generated for protos. - Mode Mode - - // ModeExplicit indicates whether the proto mode was set explicitly. - ModeExplicit bool - - // GoPrefix is the current Go prefix (the Go extension may set this in the - // root directory only). Used to generate proto rule names in the root - // directory when there are no proto files or the proto package name - // can't be determined. - // TODO(jayconrod): deprecate and remove Go-specific behavior. - GoPrefix string - - // groupOption is an option name that Gazelle will use to group .proto - // files into proto_library rules. If unset, the proto package name is used. - groupOption string -} - -// GetProtoConfig returns the proto language configuration. If the proto -// extension was not run, it will return nil. 
-func GetProtoConfig(c *config.Config) *ProtoConfig { - pc := c.Exts[protoName] - if pc == nil { - return nil - } - return pc.(*ProtoConfig) -} - -// Mode determines how proto rules are generated. -type Mode int - -const ( - // DefaultMode generates proto_library rules. Other languages should generate - // library rules based on these (e.g., go_proto_library) and should ignore - // checked-in generated files (e.g., .pb.go files) when there is a .proto - // file with a similar name. - DefaultMode Mode = iota - - // DisableMode ignores .proto files and generates empty proto_library rules. - // Checked-in generated files (e.g., .pb.go files) should be treated as - // normal sources. - DisableMode - - // DisableGlobalMode is similar to DisableMode, but it also prevents - // the use of special cases in dependency resolution for well known types - // and Google APIs. - DisableGlobalMode - - // LegacyMode generates filegroups for .proto files if .pb.go files are - // present in the same directory. - LegacyMode - - // PackageMode generates a proto_library for each set of .proto files with - // the same package name in each directory. - PackageMode -) - -func ModeFromString(s string) (Mode, error) { - switch s { - case "default": - return DefaultMode, nil - case "disable": - return DisableMode, nil - case "disable_global": - return DisableGlobalMode, nil - case "legacy": - return LegacyMode, nil - case "package": - return PackageMode, nil - default: - return 0, fmt.Errorf("unrecognized proto mode: %q", s) - } -} - -func (m Mode) String() string { - switch m { - case DefaultMode: - return "default" - case DisableMode: - return "disable" - case DisableGlobalMode: - return "disable_global" - case LegacyMode: - return "legacy" - case PackageMode: - return "package" - default: - log.Panicf("unknown mode %d", m) - return "" - } -} - -func (m Mode) ShouldGenerateRules() bool { - switch m { - case DisableMode, DisableGlobalMode, LegacyMode: - return false - default: - return true - } -} - -func (m Mode) ShouldIncludePregeneratedFiles() bool { - switch m { - case DisableMode, DisableGlobalMode, LegacyMode: - return true - default: - return false - } -} - -func (m Mode) ShouldUseKnownImports() bool { - return m != DisableGlobalMode -} - -type modeFlag struct { - mode *Mode -} - -func (f *modeFlag) Set(value string) error { - if mode, err := ModeFromString(value); err != nil { - return err - } else { - *f.mode = mode - return nil - } -} - -func (f *modeFlag) String() string { - var mode Mode - if f != nil && f.mode != nil { - mode = *f.mode - } - return mode.String() -} - -func (_ *protoLang) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) { - pc := &ProtoConfig{} - c.Exts[protoName] = pc - - // Note: the -proto flag does not set the ModeExplicit flag. We want to - // be able to switch to DisableMode in vendor directories, even when - // this is set for compatibility with older versions. 
- fs.Var(&modeFlag{&pc.Mode}, "proto", "default: generates a proto_library rule for one package\n\tpackage: generates a proto_library rule for for each package\n\tdisable: does not touch proto rules\n\tdisable_global: does not touch proto rules and does not use special cases for protos in dependency resolution") - fs.StringVar(&pc.groupOption, "proto_group", "", "option name used to group .proto files into proto_library rules") -} - -func (_ *protoLang) CheckFlags(fs *flag.FlagSet, c *config.Config) error { - return nil -} - -func (_ *protoLang) KnownDirectives() []string { - return []string{"proto", "proto_group"} -} - -func (_ *protoLang) Configure(c *config.Config, rel string, f *rule.File) { - pc := &ProtoConfig{} - *pc = *GetProtoConfig(c) - c.Exts[protoName] = pc - if f != nil { - for _, d := range f.Directives { - switch d.Key { - case "proto": - mode, err := ModeFromString(d.Value) - if err != nil { - log.Print(err) - continue - } - pc.Mode = mode - pc.ModeExplicit = true - case "proto_group": - pc.groupOption = d.Value - } - } - } - inferProtoMode(c, rel, f) -} - -// inferProtoMode sets ProtoConfig.Mode based on the directory name and the -// contents of f. If the proto mode is set explicitly, this function does not -// change it. If this is a vendor directory, or go_proto_library is loaded from -// another file, proto rule generation is disabled. -// -// TODO(jayconrod): this logic is archaic, now that rules are generated by -// separate language extensions. Proto rule generation should be independent -// from Go. -func inferProtoMode(c *config.Config, rel string, f *rule.File) { - pc := GetProtoConfig(c) - if pc.Mode != DefaultMode || pc.ModeExplicit { - return - } - if pc.GoPrefix == wellKnownTypesGoPrefix { - pc.Mode = LegacyMode - return - } - if path.Base(rel) == "vendor" { - pc.Mode = DisableMode - return - } - if f == nil { - return - } - mode := DefaultMode -outer: - for _, l := range f.Loads { - name := l.Name() - if name == "@io_bazel_rules_go//proto:def.bzl" { - break - } - if name == "@io_bazel_rules_go//proto:go_proto_library.bzl" { - mode = LegacyMode - break - } - for _, sym := range l.Symbols() { - if sym == "go_proto_library" { - mode = DisableMode - break outer - } - } - } - if mode == DefaultMode || pc.Mode == mode || c.ShouldFix && mode == LegacyMode { - return - } - pc.Mode = mode -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/constants.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/constants.go deleted file mode 100644 index be6bb4c8d6..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/constants.go +++ /dev/null @@ -1,27 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package proto - -const ( - // PackageInfoKey is the name of a private attribute set on generated - // proto_library rules. This attribute contains a Package record which - // describes the library and its sources. 
- PackageKey = "_package" - - // wellKnownTypesGoPrefix is the import path for the Go repository containing - // pre-generated code for the Well Known Types. - wellKnownTypesGoPrefix = "github.com/golang/protobuf" -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fileinfo.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fileinfo.go deleted file mode 100644 index 640fdb6c08..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fileinfo.go +++ /dev/null @@ -1,138 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package proto - -import ( - "bytes" - "io/ioutil" - "log" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" -) - -// FileInfo contains metadata extracted from a .proto file. -type FileInfo struct { - Path, Name string - - PackageName string - - Options []Option - Imports []string - - HasServices bool -} - -// Option represents a top-level option statement in a .proto file. Only -// string options are supported for now. -type Option struct { - Key, Value string -} - -var protoRe = buildProtoRegexp() - -func protoFileInfo(dir, name string) FileInfo { - info := FileInfo{ - Path: filepath.Join(dir, name), - Name: name, - } - content, err := ioutil.ReadFile(info.Path) - if err != nil { - log.Printf("%s: error reading proto file: %v", info.Path, err) - return info - } - - for _, match := range protoRe.FindAllSubmatch(content, -1) { - switch { - case match[importSubexpIndex] != nil: - imp := unquoteProtoString(match[importSubexpIndex]) - info.Imports = append(info.Imports, imp) - - case match[packageSubexpIndex] != nil: - pkg := string(match[packageSubexpIndex]) - if info.PackageName == "" { - info.PackageName = pkg - } - - case match[optkeySubexpIndex] != nil: - key := string(match[optkeySubexpIndex]) - value := unquoteProtoString(match[optvalSubexpIndex]) - info.Options = append(info.Options, Option{key, value}) - - case match[serviceSubexpIndex] != nil: - info.HasServices = true - - default: - // Comment matched. Nothing to extract. 
- } - } - sort.Strings(info.Imports) - - return info -} - -const ( - importSubexpIndex = 1 - packageSubexpIndex = 2 - optkeySubexpIndex = 3 - optvalSubexpIndex = 4 - serviceSubexpIndex = 5 -) - -// Based on https://developers.google.com/protocol-buffers/docs/reference/proto3-spec -func buildProtoRegexp() *regexp.Regexp { - hexEscape := `\\[xX][0-9a-fA-f]{2}` - octEscape := `\\[0-7]{3}` - charEscape := `\\[abfnrtv'"\\]` - charValue := strings.Join([]string{hexEscape, octEscape, charEscape, "[^\x00\\'\\\"\\\\]"}, "|") - strLit := `'(?:` + charValue + `|")*'|"(?:` + charValue + `|')*"` - ident := `[A-Za-z][A-Za-z0-9_]*` - fullIdent := ident + `(?:\.` + ident + `)*` - importStmt := `\bimport\s*(?:public|weak)?\s*(?P` + strLit + `)\s*;` - packageStmt := `\bpackage\s*(?P` + fullIdent + `)\s*;` - optionStmt := `\boption\s*(?P` + fullIdent + `)\s*=\s*(?P` + strLit + `)\s*;` - serviceStmt := `(?Pservice)` - comment := `//[^\n]*` - protoReSrc := strings.Join([]string{importStmt, packageStmt, optionStmt, serviceStmt, comment}, "|") - return regexp.MustCompile(protoReSrc) -} - -func unquoteProtoString(q []byte) string { - // Adjust quotes so that Unquote is happy. We need a double quoted string - // without unescaped double quote characters inside. - noQuotes := bytes.Split(q[1:len(q)-1], []byte{'"'}) - if len(noQuotes) != 1 { - for i := 0; i < len(noQuotes)-1; i++ { - if len(noQuotes[i]) == 0 || noQuotes[i][len(noQuotes[i])-1] != '\\' { - noQuotes[i] = append(noQuotes[i], '\\') - } - } - q = append([]byte{'"'}, bytes.Join(noQuotes, []byte{'"'})...) - q = append(q, '"') - } - if q[0] == '\'' { - q[0] = '"' - q[len(q)-1] = '"' - } - - s, err := strconv.Unquote(string(q)) - if err != nil { - log.Panicf("unquoting string literal %s from proto: %v", q, err) - } - return s -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fix.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fix.go deleted file mode 100644 index c8e67bf8fb..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/fix.go +++ /dev/null @@ -1,24 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package proto - -import ( - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func (_ *protoLang) Fix(c *config.Config, f *rule.File) { -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/generate.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/generate.go deleted file mode 100644 index e9e3779883..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/generate.go +++ /dev/null @@ -1,285 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package proto - -import ( - "fmt" - "log" - "sort" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/language" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func (_ *protoLang) GenerateRules(args language.GenerateArgs) language.GenerateResult { - c := args.Config - pc := GetProtoConfig(c) - if !pc.Mode.ShouldGenerateRules() { - // Don't create or delete proto rules in this mode. Any existing rules - // are likely hand-written. - return language.GenerateResult{} - } - - var regularProtoFiles []string - for _, name := range args.RegularFiles { - if strings.HasSuffix(name, ".proto") { - regularProtoFiles = append(regularProtoFiles, name) - } - } - var genProtoFiles []string - for _, name := range args.GenFiles { - if strings.HasSuffix(name, ".proto") { - genProtoFiles = append(args.GenFiles, name) - } - } - pkgs := buildPackages(pc, args.Dir, args.Rel, regularProtoFiles, genProtoFiles) - shouldSetVisibility := !hasDefaultVisibility(args.File) - var res language.GenerateResult - for _, pkg := range pkgs { - r := generateProto(pc, args.Rel, pkg, shouldSetVisibility) - if r.IsEmpty(protoKinds[r.Kind()]) { - res.Empty = append(res.Empty, r) - } else { - res.Gen = append(res.Gen, r) - } - } - sort.SliceStable(res.Gen, func(i, j int) bool { - return res.Gen[i].Name() < res.Gen[j].Name() - }) - res.Imports = make([]interface{}, len(res.Gen)) - for i, r := range res.Gen { - res.Imports[i] = r.PrivateAttr(config.GazelleImportsKey) - } - res.Empty = append(res.Empty, generateEmpty(args.File, regularProtoFiles, genProtoFiles)...) - return res -} - -// RuleName returns a name for a proto_library derived from the given strings. -// For each string, RuleName will look for a non-empty suffix of identifier -// characters and then append "_proto" to that. -func RuleName(names ...string) string { - base := "root" - for _, name := range names { - notIdent := func(c rune) bool { - return !('A' <= c && c <= 'Z' || - 'a' <= c && c <= 'z' || - '0' <= c && c <= '9' || - c == '_') - } - if i := strings.LastIndexFunc(name, notIdent); i >= 0 { - name = name[i+1:] - } - if name != "" { - base = name - break - } - } - return base + "_proto" -} - -// buildPackage extracts metadata from the .proto files in a directory and -// constructs possibly several packages, then selects a package to generate -// a proto_library rule for. 
-func buildPackages(pc *ProtoConfig, dir, rel string, protoFiles, genFiles []string) []*Package { - packageMap := make(map[string]*Package) - for _, name := range protoFiles { - info := protoFileInfo(dir, name) - key := info.PackageName - if pc.groupOption != "" { - for _, opt := range info.Options { - if opt.Key == pc.groupOption { - key = opt.Value - break - } - } - } - if packageMap[key] == nil { - packageMap[key] = newPackage(info.PackageName) - } - packageMap[key].addFile(info) - } - - switch pc.Mode { - case DefaultMode: - pkg, err := selectPackage(dir, rel, packageMap) - if err != nil { - log.Print(err) - } - if pkg == nil { - return nil // empty rule created in generateEmpty - } - for _, name := range genFiles { - pkg.addGenFile(dir, name) - } - return []*Package{pkg} - - case PackageMode: - pkgs := make([]*Package, 0, len(packageMap)) - for _, pkg := range packageMap { - pkgs = append(pkgs, pkg) - } - return pkgs - - default: - return nil - } -} - -// selectPackage chooses a package to generate rules for. -func selectPackage(dir, rel string, packageMap map[string]*Package) (*Package, error) { - if len(packageMap) == 0 { - return nil, nil - } - if len(packageMap) == 1 { - for _, pkg := range packageMap { - return pkg, nil - } - } - defaultPackageName := strings.Replace(rel, "/", "_", -1) - for _, pkg := range packageMap { - if pkgName := goPackageName(pkg); pkgName != "" && pkgName == defaultPackageName { - return pkg, nil - } - } - return nil, fmt.Errorf("%s: directory contains multiple proto packages. Gazelle can only generate a proto_library for one package.", dir) -} - -// goPackageName guesses the identifier in package declarations at the top of -// the .pb.go files that will be generated for this package. "" is returned -// if the package name cannot be determined. -// -// TODO(jayconrod): remove all Go-specific functionality. This is here -// temporarily for compatibility. -func goPackageName(pkg *Package) string { - if opt, ok := pkg.Options["go_package"]; ok { - if i := strings.IndexByte(opt, ';'); i >= 0 { - return opt[i+1:] - } else if i := strings.LastIndexByte(opt, '/'); i >= 0 { - return opt[i+1:] - } else { - return opt - } - } - if pkg.Name != "" { - return strings.Replace(pkg.Name, ".", "_", -1) - } - if len(pkg.Files) == 1 { - for s := range pkg.Files { - return strings.TrimSuffix(s, ".proto") - } - } - return "" -} - -// generateProto creates a new proto_library rule for a package. The rule may -// be empty if there are no sources. -func generateProto(pc *ProtoConfig, rel string, pkg *Package, shouldSetVisibility bool) *rule.Rule { - var name string - if pc.Mode == DefaultMode { - name = RuleName(goPackageName(pkg), pc.GoPrefix, rel) - } else { - name = RuleName(pkg.Options[pc.groupOption], pkg.Name, rel) - } - r := rule.NewRule("proto_library", name) - srcs := make([]string, 0, len(pkg.Files)) - for f := range pkg.Files { - srcs = append(srcs, f) - } - sort.Strings(srcs) - if len(srcs) > 0 { - r.SetAttr("srcs", srcs) - } - r.SetPrivateAttr(PackageKey, *pkg) - imports := make([]string, 0, len(pkg.Imports)) - for i := range pkg.Imports { - imports = append(imports, i) - } - sort.Strings(imports) - // NOTE: This attribute should not be used outside this extension. It's still - // convenient for testing though. 
- r.SetPrivateAttr(config.GazelleImportsKey, imports) - for k, v := range pkg.Options { - r.SetPrivateAttr(k, v) - } - if shouldSetVisibility { - vis := checkInternalVisibility(rel, "//visibility:public") - r.SetAttr("visibility", []string{vis}) - } - return r -} - -// generateEmpty generates a list of proto_library rules that may be deleted. -// This is generated from existing proto_library rules with srcs lists that -// don't match any static or generated files. -func generateEmpty(f *rule.File, regularFiles, genFiles []string) []*rule.Rule { - if f == nil { - return nil - } - knownFiles := make(map[string]bool) - for _, f := range regularFiles { - knownFiles[f] = true - } - for _, f := range genFiles { - knownFiles[f] = true - } - var empty []*rule.Rule -outer: - for _, r := range f.Rules { - if r.Kind() != "proto_library" { - continue - } - srcs := r.AttrStrings("srcs") - if len(srcs) == 0 && r.Attr("srcs") != nil { - // srcs is not a string list; leave it alone - continue - } - for _, src := range r.AttrStrings("srcs") { - if knownFiles[src] { - continue outer - } - } - empty = append(empty, rule.NewRule("proto_library", r.Name())) - } - return empty -} - -// hasDefaultVisibility returns whether oldFile contains a "package" rule with -// a "default_visibility" attribute. Rules generated by Gazelle should not -// have their own visibility attributes if this is the case. -func hasDefaultVisibility(f *rule.File) bool { - if f == nil { - return false - } - for _, r := range f.Rules { - if r.Kind() == "package" && r.Attr("default_visibility") != nil { - return true - } - } - return false -} - -// checkInternalVisibility overrides the given visibility if the package is -// internal. -func checkInternalVisibility(rel, visibility string) string { - if i := strings.LastIndex(rel, "/internal/"); i >= 0 { - visibility = fmt.Sprintf("//%s:__subpackages__", rel[:i]) - } else if strings.HasPrefix(rel, "internal/") { - visibility = "//:__subpackages__" - } - return visibility -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/kinds.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/kinds.go deleted file mode 100644 index 9fe7d45e66..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/kinds.go +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package proto - -import "github.com/bazelbuild/bazel-gazelle/rule" - -var protoKinds = map[string]rule.KindInfo{ - "proto_library": { - NonEmptyAttrs: map[string]bool{"srcs": true}, - MergeableAttrs: map[string]bool{"srcs": true}, - ResolveAttrs: map[string]bool{"deps": true}, - }, -} - -func (_ *protoLang) Kinds() map[string]rule.KindInfo { return protoKinds } -func (_ *protoLang) Loads() []rule.LoadInfo { return nil } diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/known_imports.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/known_imports.go deleted file mode 100644 index 551a87e1d1..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/known_imports.go +++ /dev/null @@ -1,366 +0,0 @@ - -// Generated by internal/language/proto/gen/gen_known_imports.go -// From internal/language/proto/proto.csv - -package proto - -import "github.com/bazelbuild/bazel-gazelle/label" - -var knownImports = map[string]label.Label{ - - "google/protobuf/any.proto": label.New("com_google_protobuf", "", "any_proto"), - "google/protobuf/api.proto": label.New("com_google_protobuf", "", "api_proto"), - "google/protobuf/compiler/plugin.proto": label.New("com_google_protobuf", "", "compiler_plugin_proto"), - "google/protobuf/descriptor.proto": label.New("com_google_protobuf", "", "descriptor_proto"), - "google/protobuf/duration.proto": label.New("com_google_protobuf", "", "duration_proto"), - "google/protobuf/empty.proto": label.New("com_google_protobuf", "", "empty_proto"), - "google/protobuf/field_mask.proto": label.New("com_google_protobuf", "", "field_mask_proto"), - "google/protobuf/source_context.proto": label.New("com_google_protobuf", "", "source_context_proto"), - "google/protobuf/struct.proto": label.New("com_google_protobuf", "", "struct_proto"), - "google/protobuf/timestamp.proto": label.New("com_google_protobuf", "", "timestamp_proto"), - "google/protobuf/type.proto": label.New("com_google_protobuf", "", "type_proto"), - "google/protobuf/wrappers.proto": label.New("com_google_protobuf", "", "wrappers_proto"), - "google/api/annotations.proto": label.New("go_googleapis", "google/api", "annotations_proto"), - "google/api/auth.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/backend.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/billing.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/config_change.proto": label.New("go_googleapis", "google/api", "configchange_proto"), - "google/api/consumer.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/context.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/control.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/distribution.proto": label.New("go_googleapis", "google/api", "distribution_proto"), - "google/api/documentation.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/endpoint.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/experimental/authorization_config.proto": label.New("go_googleapis", "google/api", "api_proto"), - "google/api/experimental/experimental.proto": label.New("go_googleapis", "google/api", "api_proto"), - "google/api/expr/v1alpha1/cel_service.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_proto"), - 
"google/api/expr/v1alpha1/checked.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_proto"), - "google/api/expr/v1alpha1/eval.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_proto"), - "google/api/expr/v1alpha1/explain.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_proto"), - "google/api/expr/v1alpha1/syntax.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_proto"), - "google/api/expr/v1alpha1/value.proto": label.New("go_googleapis", "google/api/expr/v1alpha1", "expr_proto"), - "google/api/expr/v1beta1/decl.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_proto"), - "google/api/expr/v1beta1/eval.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_proto"), - "google/api/expr/v1beta1/expr.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_proto"), - "google/api/expr/v1beta1/source.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_proto"), - "google/api/expr/v1beta1/value.proto": label.New("go_googleapis", "google/api/expr/v1beta1", "expr_proto"), - "google/api/http.proto": label.New("go_googleapis", "google/api", "annotations_proto"), - "google/api/httpbody.proto": label.New("go_googleapis", "google/api", "httpbody_proto"), - "google/api/label.proto": label.New("go_googleapis", "google/api", "label_proto"), - "google/api/launch_stage.proto": label.New("go_googleapis", "google/api", "api_proto"), - "google/api/log.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/logging.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/metric.proto": label.New("go_googleapis", "google/api", "metric_proto"), - "google/api/monitored_resource.proto": label.New("go_googleapis", "google/api", "monitoredres_proto"), - "google/api/monitoring.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/quota.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/service.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/servicecontrol/v1/check_error.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicecontrol/v1/distribution.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicecontrol/v1/log_entry.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicecontrol/v1/metric_value.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicecontrol/v1/operation.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicecontrol/v1/quota_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicecontrol/v1/service_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"), - "google/api/servicemanagement/v1/resources.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_proto"), - "google/api/servicemanagement/v1/servicemanager.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_proto"), - "google/api/source_info.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/api/system_parameter.proto": label.New("go_googleapis", 
"google/api", "serviceconfig_proto"), - "google/api/usage.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"), - "google/appengine/legacy/audit_data.proto": label.New("go_googleapis", "google/appengine/legacy", "legacy_proto"), - "google/appengine/logging/v1/request_log.proto": label.New("go_googleapis", "google/appengine/logging/v1", "logging_proto"), - "google/appengine/v1/app_yaml.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/appengine.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/application.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/audit_data.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/deploy.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/instance.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/location.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/operation.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/service.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/appengine/v1/version.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"), - "google/assistant/embedded/v1alpha1/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha1", "embedded_proto"), - "google/assistant/embedded/v1alpha2/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha2", "embedded_proto"), - "google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_proto"), - "google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_proto"), - "google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_proto"), - "google/bigtable/admin/table/v1/bigtable_table_data.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_proto"), - "google/bigtable/admin/table/v1/bigtable_table_service.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_proto"), - "google/bigtable/admin/table/v1/bigtable_table_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_proto"), - "google/bigtable/admin/v2/bigtable_instance_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"), - "google/bigtable/admin/v2/bigtable_table_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"), - "google/bigtable/admin/v2/common.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"), - "google/bigtable/admin/v2/instance.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"), - "google/bigtable/admin/v2/table.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"), - "google/bigtable/v1/bigtable_data.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_proto"), - "google/bigtable/v1/bigtable_service.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_proto"), - "google/bigtable/v1/bigtable_service_messages.proto": 
label.New("go_googleapis", "google/bigtable/v1", "bigtable_proto"), - "google/bigtable/v2/bigtable.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_proto"), - "google/bigtable/v2/data.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_proto"), - "google/bytestream/bytestream.proto": label.New("go_googleapis", "google/bytestream", "bytestream_proto"), - "google/cloud/asset/v1beta1/asset_service.proto": label.New("go_googleapis", "google/cloud/asset/v1beta1", "asset_proto"), - "google/cloud/asset/v1beta1/assets.proto": label.New("go_googleapis", "google/cloud/asset/v1beta1", "asset_proto"), - "google/cloud/audit/audit_log.proto": label.New("go_googleapis", "google/cloud/audit", "audit_proto"), - "google/cloud/automl/v1beta1/annotation_payload.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/classification.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/data_items.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/dataset.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/image.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/io.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/model.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/model_evaluation.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/operations.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/prediction_service.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/service.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/text.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/automl/v1beta1/translation.proto": label.New("go_googleapis", "google/cloud/automl/v1beta1", "automl_proto"), - "google/cloud/bigquery/datatransfer/v1/datatransfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_proto"), - "google/cloud/bigquery/datatransfer/v1/transfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_proto"), - "google/cloud/bigquery/logging/v1/audit_data.proto": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_proto"), - "google/cloud/bigquery/storage/v1beta1/avro.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_proto"), - "google/cloud/bigquery/storage/v1beta1/read_options.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_proto"), - "google/cloud/bigquery/storage/v1beta1/storage.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_proto"), - "google/cloud/bigquery/storage/v1beta1/table_reference.proto": label.New("go_googleapis", "google/cloud/bigquery/storage/v1beta1", "storage_proto"), - "google/cloud/billing/v1/cloud_billing.proto": label.New("go_googleapis", "google/cloud/billing/v1", "billing_proto"), - "google/cloud/dataproc/v1/clusters.proto": 
label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_proto"), - "google/cloud/dataproc/v1/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_proto"), - "google/cloud/dataproc/v1/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_proto"), - "google/cloud/dataproc/v1beta2/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"), - "google/cloud/dataproc/v1beta2/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"), - "google/cloud/dataproc/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"), - "google/cloud/dataproc/v1beta2/shared.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"), - "google/cloud/dataproc/v1beta2/workflow_templates.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"), - "google/cloud/dialogflow/v2/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/audio_config.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/document.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/knowledge_base.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/dialogflow/v2beta1/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"), - "google/cloud/functions/v1beta2/functions.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_proto"), - "google/cloud/functions/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_proto"), - "google/cloud/iot/v1/device_manager.proto": label.New("go_googleapis", 
"google/cloud/iot/v1", "iot_proto"), - "google/cloud/iot/v1/resources.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_proto"), - "google/cloud/kms/v1/resources.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_proto"), - "google/cloud/kms/v1/service.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_proto"), - "google/cloud/language/v1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1", "language_proto"), - "google/cloud/language/v1beta1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_proto"), - "google/cloud/language/v1beta2/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta2", "language_proto"), - "google/cloud/location/locations.proto": label.New("go_googleapis", "google/cloud/location", "location_proto"), - "google/cloud/ml/v1/job_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"), - "google/cloud/ml/v1/model_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"), - "google/cloud/ml/v1/operation_metadata.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"), - "google/cloud/ml/v1/prediction_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"), - "google/cloud/ml/v1/project_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"), - "google/cloud/oslogin/common/common.proto": label.New("go_googleapis", "google/cloud/oslogin/common", "common_proto"), - "google/cloud/oslogin/v1/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_proto"), - "google/cloud/oslogin/v1alpha/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_proto"), - "google/cloud/oslogin/v1beta/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_proto"), - "google/cloud/redis/v1/cloud_redis.proto": label.New("go_googleapis", "google/cloud/redis/v1", "redis_proto"), - "google/cloud/redis/v1beta1/cloud_redis.proto": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_proto"), - "google/cloud/resourcemanager/v2/folders.proto": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_proto"), - "google/cloud/runtimeconfig/v1beta1/resources.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_proto"), - "google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_proto"), - "google/cloud/speech/v1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1", "speech_proto"), - "google/cloud/speech/v1p1beta1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_proto"), - "google/cloud/support/common.proto": label.New("go_googleapis", "google/cloud/support", "common_proto"), - "google/cloud/support/v1alpha1/cloud_support.proto": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_proto"), - "google/cloud/tasks/v2beta2/cloudtasks.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"), - "google/cloud/tasks/v2beta2/queue.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"), - "google/cloud/tasks/v2beta2/target.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"), - "google/cloud/tasks/v2beta2/task.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"), - 
"google/cloud/tasks/v2beta3/cloudtasks.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_proto"), - "google/cloud/tasks/v2beta3/queue.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_proto"), - "google/cloud/tasks/v2beta3/target.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_proto"), - "google/cloud/tasks/v2beta3/task.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta3", "tasks_proto"), - "google/cloud/texttospeech/v1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_proto"), - "google/cloud/texttospeech/v1beta1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_proto"), - "google/cloud/videointelligence/v1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_proto"), - "google/cloud/videointelligence/v1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_proto"), - "google/cloud/videointelligence/v1beta2/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_proto"), - "google/cloud/videointelligence/v1p1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_proto"), - "google/cloud/videointelligence/v1p2beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1p2beta1", "videointelligence_proto"), - "google/cloud/vision/v1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"), - "google/cloud/vision/v1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"), - "google/cloud/vision/v1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"), - "google/cloud/vision/v1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"), - "google/cloud/vision/v1p1beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"), - "google/cloud/vision/v1p1beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"), - "google/cloud/vision/v1p1beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"), - "google/cloud/vision/v1p1beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"), - "google/cloud/vision/v1p2beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"), - "google/cloud/vision/v1p2beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"), - "google/cloud/vision/v1p2beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"), - "google/cloud/vision/v1p2beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"), - "google/cloud/vision/v1p3beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_proto"), - "google/cloud/vision/v1p3beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_proto"), - "google/cloud/vision/v1p3beta1/product_search.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_proto"), - 
"google/cloud/vision/v1p3beta1/product_search_service.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_proto"), - "google/cloud/vision/v1p3beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_proto"), - "google/cloud/vision/v1p3beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p3beta1", "vision_proto"), - "google/cloud/websecurityscanner/v1alpha/crawled_url.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/cloud/websecurityscanner/v1alpha/finding.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/cloud/websecurityscanner/v1alpha/finding_addon.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/cloud/websecurityscanner/v1alpha/scan_config.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/cloud/websecurityscanner/v1alpha/scan_run.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"), - "google/container/v1/cluster_service.proto": label.New("go_googleapis", "google/container/v1", "container_proto"), - "google/container/v1alpha1/cluster_service.proto": label.New("go_googleapis", "google/container/v1alpha1", "container_proto"), - "google/container/v1beta1/cluster_service.proto": label.New("go_googleapis", "google/container/v1beta1", "container_proto"), - "google/datastore/admin/v1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1", "admin_proto"), - "google/datastore/admin/v1/index.proto": label.New("go_googleapis", "google/datastore/admin/v1", "admin_proto"), - "google/datastore/admin/v1beta1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1beta1", "admin_proto"), - "google/datastore/v1/datastore.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_proto"), - "google/datastore/v1/entity.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_proto"), - "google/datastore/v1/query.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_proto"), - "google/datastore/v1beta3/datastore.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_proto"), - "google/datastore/v1beta3/entity.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_proto"), - "google/datastore/v1beta3/query.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_proto"), - "google/devtools/build/v1/build_events.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_proto"), - "google/devtools/build/v1/build_status.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_proto"), - "google/devtools/build/v1/publish_build_event.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_proto"), - "google/devtools/cloudbuild/v1/cloudbuild.proto": label.New("go_googleapis", "google/devtools/cloudbuild/v1", "cloudbuild_proto"), - "google/devtools/clouddebugger/v2/controller.proto": 
label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_proto"), - "google/devtools/clouddebugger/v2/data.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_proto"), - "google/devtools/clouddebugger/v2/debugger.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_proto"), - "google/devtools/clouderrorreporting/v1beta1/common.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"), - "google/devtools/clouderrorreporting/v1beta1/error_group_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"), - "google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"), - "google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"), - "google/devtools/cloudprofiler/v2/profiler.proto": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_proto"), - "google/devtools/cloudtrace/v1/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_proto"), - "google/devtools/cloudtrace/v2/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_proto"), - "google/devtools/cloudtrace/v2/tracing.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_proto"), - "google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1alpha1/containeranalysis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1alpha1/image_basis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1alpha1/provenance.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1alpha1/source_context.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1beta1/attestation/attestation.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/attestation", "attestation_proto"), - "google/devtools/containeranalysis/v1beta1/build/build.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/build", "build_proto"), - "google/devtools/containeranalysis/v1beta1/common/common.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/common", "common_proto"), - "google/devtools/containeranalysis/v1beta1/containeranalysis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1", "containeranalysis_proto"), - "google/devtools/containeranalysis/v1beta1/deployment/deployment.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/deployment", "deployment_proto"), - "google/devtools/containeranalysis/v1beta1/discovery/discovery.proto": 
label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/discovery", "discovery_proto"), - "google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/grafeas", "grafeas_proto"), - "google/devtools/containeranalysis/v1beta1/image/image.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/image", "image_proto"), - "google/devtools/containeranalysis/v1beta1/package/package.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/package", "package_proto"), - "google/devtools/containeranalysis/v1beta1/provenance/provenance.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/provenance", "provenance_proto"), - "google/devtools/containeranalysis/v1beta1/source/source.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/source", "source_proto"), - "google/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1beta1/vulnerability", "vulnerability_proto"), - "google/devtools/remoteexecution/v1test/remote_execution.proto": label.New("go_googleapis", "google/devtools/remoteexecution/v1test", "remoteexecution_proto"), - "google/devtools/remoteworkers/v1test2/bots.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"), - "google/devtools/remoteworkers/v1test2/command.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"), - "google/devtools/remoteworkers/v1test2/tasks.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"), - "google/devtools/remoteworkers/v1test2/worker.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"), - "google/devtools/resultstore/v2/action.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/common.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/configuration.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/configured_target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/coverage.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/coverage_summary.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/file.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/file_set.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/invocation.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/resultstore_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/resultstore_file_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/resultstore/v2/target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - 
"google/devtools/resultstore/v2/test_suite.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"), - "google/devtools/source/v1/source_context.proto": label.New("go_googleapis", "google/devtools/source/v1", "source_proto"), - "google/devtools/sourcerepo/v1/sourcerepo.proto": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_proto"), - "google/example/library/v1/library.proto": label.New("go_googleapis", "google/example/library/v1", "library_proto"), - "google/firestore/admin/v1beta1/firestore_admin.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_proto"), - "google/firestore/admin/v1beta1/index.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_proto"), - "google/firestore/admin/v1beta2/field.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_proto"), - "google/firestore/admin/v1beta2/firestore_admin.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_proto"), - "google/firestore/admin/v1beta2/index.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_proto"), - "google/firestore/admin/v1beta2/operation.proto": label.New("go_googleapis", "google/firestore/admin/v1beta2", "admin_proto"), - "google/firestore/v1beta1/common.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"), - "google/firestore/v1beta1/document.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"), - "google/firestore/v1beta1/firestore.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"), - "google/firestore/v1beta1/query.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"), - "google/firestore/v1beta1/write.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"), - "google/genomics/v1/annotations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/cigar.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/datasets.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/operations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/position.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/range.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/readalignment.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/readgroup.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/readgroupset.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/reads.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/references.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1/variants.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"), - "google/genomics/v1alpha2/pipelines.proto": label.New("go_googleapis", "google/genomics/v1alpha2", "genomics_proto"), - "google/home/graph/v1/device.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_proto"), - "google/home/graph/v1/homegraph.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_proto"), - "google/iam/admin/v1/iam.proto": 
label.New("go_googleapis", "google/iam/admin/v1", "admin_proto"), - "google/iam/credentials/v1/common.proto": label.New("go_googleapis", "google/iam/credentials/v1", "credentials_proto"), - "google/iam/credentials/v1/iamcredentials.proto": label.New("go_googleapis", "google/iam/credentials/v1", "credentials_proto"), - "google/iam/v1/iam_policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_proto"), - "google/iam/v1/logging/audit_data.proto": label.New("go_googleapis", "google/iam/v1/logging", "logging_proto"), - "google/iam/v1/policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_proto"), - "google/logging/type/http_request.proto": label.New("go_googleapis", "google/logging/type", "ltype_proto"), - "google/logging/type/log_severity.proto": label.New("go_googleapis", "google/logging/type", "ltype_proto"), - "google/logging/v2/log_entry.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"), - "google/logging/v2/logging.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"), - "google/logging/v2/logging_config.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"), - "google/logging/v2/logging_metrics.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"), - "google/longrunning/operations.proto": label.New("go_googleapis", "google/longrunning", "longrunning_proto"), - "google/monitoring/v3/alert.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/alert_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/common.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/dropped_labels.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/group.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/group_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/metric.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/metric_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/mutation_record.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/notification.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/notification_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/span_context.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/uptime.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/monitoring/v3/uptime_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"), - "google/privacy/dlp/v2/dlp.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_proto"), - "google/privacy/dlp/v2/storage.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_proto"), - "google/pubsub/v1/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1", "pubsub_proto"), - "google/pubsub/v1beta2/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1beta2", "pubsub_proto"), - "google/rpc/code.proto": label.New("go_googleapis", "google/rpc", "code_proto"), - "google/rpc/error_details.proto": 
label.New("go_googleapis", "google/rpc", "errdetails_proto"), - "google/rpc/status.proto": label.New("go_googleapis", "google/rpc", "status_proto"), - "google/spanner/admin/database/v1/spanner_database_admin.proto": label.New("go_googleapis", "google/spanner/admin/database/v1", "database_proto"), - "google/spanner/admin/instance/v1/spanner_instance_admin.proto": label.New("go_googleapis", "google/spanner/admin/instance/v1", "instance_proto"), - "google/spanner/v1/keys.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/spanner/v1/mutation.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/spanner/v1/query_plan.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/spanner/v1/result_set.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/spanner/v1/spanner.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/spanner/v1/transaction.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/spanner/v1/type.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"), - "google/storagetransfer/v1/transfer.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_proto"), - "google/storagetransfer/v1/transfer_types.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_proto"), - "google/streetview/publish/v1/resources.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_proto"), - "google/streetview/publish/v1/rpcmessages.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_proto"), - "google/streetview/publish/v1/streetview_publish.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_proto"), - "google/type/color.proto": label.New("go_googleapis", "google/type", "color_proto"), - "google/type/date.proto": label.New("go_googleapis", "google/type", "date_proto"), - "google/type/dayofweek.proto": label.New("go_googleapis", "google/type", "dayofweek_proto"), - "google/type/latlng.proto": label.New("go_googleapis", "google/type", "latlng_proto"), - "google/type/money.proto": label.New("go_googleapis", "google/type", "money_proto"), - "google/type/postal_address.proto": label.New("go_googleapis", "google/type", "postaladdress_proto"), - "google/type/timeofday.proto": label.New("go_googleapis", "google/type", "timeofday_proto"), - "google/watcher/v1/watch.proto": label.New("go_googleapis", "google/watcher/v1", "watcher_proto"), -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/lang.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/lang.go deleted file mode 100644 index fe009df88d..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/lang.go +++ /dev/null @@ -1,72 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -// Package proto provides support for protocol buffer rules. -// It generates proto_library rules only (not go_proto_library or any other -// language-specific implementations). -// -// Configuration -// -// Configuration is largely controlled by Mode. In disable mode, proto rules are -// left alone (neither generated nor deleted). In legacy mode, filegroups are -// emitted containing protos. In default mode, proto_library rules are -// emitted. The proto mode may be set with the -proto command line flag or the -// "# gazelle:proto" directive. -// -// The configuration is largely public, and other languages may depend on it. -// For example, go uses Mode to determine whether to generate go_proto_library -// rules and ignore static .pb.go files. -// -// Rule generation -// -// Currently, Gazelle generates at most one proto_library per directory. Protos -// in the same package are grouped together into a proto_library. If there are -// sources for multiple packages, the package name that matches the directory -// name will be chosen; if there is no such package, an error will be printed. -// We expect to provide support for multiple proto_libraries in the future -// when Go has support for multiple packages and we have better rule matching. -// The generated proto_library will be named after the directory, not the -// proto or the package. For example, for foo/bar/baz.proto, a proto_library -// rule will be generated named //foo/bar:bar_proto. -// -// Dependency resolution -// -// proto_library rules are indexed by their srcs attribute. Gazelle attempts -// to resolve proto imports (e.g., import foo/bar/bar.proto) to the -// proto_library that contains the named source file -// (e.g., //foo/bar:bar_proto). If no indexed proto_library provides the source -// file, Gazelle will guess a label, following conventions. -// -// No attempt is made to resolve protos to rules in external repositories, -// since there's no indication that a proto import comes from an external -// repository. In the future, build files in external repos will be indexed, -// so we can support this (#12). -// -// Gazelle has special cases for Well Known Types (i.e., imports of the form -// google/protobuf/*.proto). These are resolved to rules in -// @com_google_protobuf. -package proto - -import "github.com/bazelbuild/bazel-gazelle/language" - -const protoName = "proto" - -type protoLang struct{} - -func (_ *protoLang) Name() string { return protoName } - -func NewLanguage() language.Language { - return &protoLang{} -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/package.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/package.go deleted file mode 100644 index fba05f0c32..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/package.go +++ /dev/null @@ -1,55 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package proto - -import "path/filepath" - -// Package contains metadata for a set of .proto files that have the -// same package name. This translates to a proto_library rule. -type Package struct { - Name string - Files map[string]FileInfo - Imports map[string]bool - Options map[string]string - HasServices bool -} - -func newPackage(name string) *Package { - return &Package{ - Name: name, - Files: map[string]FileInfo{}, - Imports: map[string]bool{}, - Options: map[string]string{}, - } -} - -func (p *Package) addFile(info FileInfo) { - p.Files[info.Name] = info - for _, imp := range info.Imports { - p.Imports[imp] = true - } - for _, opt := range info.Options { - p.Options[opt.Key] = opt.Value - } - p.HasServices = p.HasServices || info.HasServices -} - -func (p *Package) addGenFile(dir, name string) { - p.Files[name] = FileInfo{ - Name: name, - Path: filepath.Join(dir, filepath.FromSlash(name)), - } -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/resolve.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/resolve.go deleted file mode 100644 index 3c6fc6f1bf..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/language/proto/resolve.go +++ /dev/null @@ -1,125 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package proto - -import ( - "errors" - "fmt" - "log" - "path" - "sort" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/label" - "github.com/bazelbuild/bazel-gazelle/repo" - "github.com/bazelbuild/bazel-gazelle/resolve" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -func (_ *protoLang) Imports(c *config.Config, r *rule.Rule, f *rule.File) []resolve.ImportSpec { - rel := f.Pkg - srcs := r.AttrStrings("srcs") - imports := make([]resolve.ImportSpec, len(srcs)) - for i, src := range srcs { - imports[i] = resolve.ImportSpec{Lang: "proto", Imp: path.Join(rel, src)} - } - return imports -} - -func (_ *protoLang) Embeds(r *rule.Rule, from label.Label) []label.Label { - return nil -} - -func (_ *protoLang) Resolve(c *config.Config, ix *resolve.RuleIndex, rc *repo.RemoteCache, r *rule.Rule, importsRaw interface{}, from label.Label) { - if importsRaw == nil { - // may not be set in tests. 
- return - } - imports := importsRaw.([]string) - r.DelAttr("deps") - depSet := make(map[string]bool) - for _, imp := range imports { - l, err := resolveProto(c, ix, r, imp, from) - if err == skipImportError { - continue - } else if err != nil { - log.Print(err) - } else { - l = l.Rel(from.Repo, from.Pkg) - depSet[l.String()] = true - } - } - if len(depSet) > 0 { - deps := make([]string, 0, len(depSet)) - for dep := range depSet { - deps = append(deps, dep) - } - sort.Strings(deps) - r.SetAttr("deps", deps) - } -} - -var ( - skipImportError = errors.New("std import") - notFoundError = errors.New("not found") -) - -func resolveProto(c *config.Config, ix *resolve.RuleIndex, r *rule.Rule, imp string, from label.Label) (label.Label, error) { - pc := GetProtoConfig(c) - if !strings.HasSuffix(imp, ".proto") { - return label.NoLabel, fmt.Errorf("can't import non-proto: %q", imp) - } - - if l, ok := resolve.FindRuleWithOverride(c, resolve.ImportSpec{Imp: imp, Lang: "proto"}, "proto"); ok { - return l, nil - } - - if l, ok := knownImports[imp]; ok && pc.Mode.ShouldUseKnownImports() { - if l.Equal(from) { - return label.NoLabel, skipImportError - } else { - return l, nil - } - } - - if l, err := resolveWithIndex(ix, imp, from); err == nil || err == skipImportError { - return l, err - } else if err != notFoundError { - return label.NoLabel, err - } - - rel := path.Dir(imp) - if rel == "." { - rel = "" - } - name := RuleName(rel) - return label.New("", rel, name), nil -} - -func resolveWithIndex(ix *resolve.RuleIndex, imp string, from label.Label) (label.Label, error) { - matches := ix.FindRulesByImport(resolve.ImportSpec{Lang: "proto", Imp: imp}, "proto") - if len(matches) == 0 { - return label.NoLabel, notFoundError - } - if len(matches) > 1 { - return label.NoLabel, fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", matches[0].Label, matches[1].Label, imp, from) - } - if matches[0].IsSelfImport(from) { - return label.NoLabel, skipImportError - } - return matches[0].Label, nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/BUILD.bazel deleted file mode 100644 index 99fba1a927..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/BUILD.bazel +++ /dev/null @@ -1,13 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "fix.go", - "merger.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger", - importpath = "github.com/bazelbuild/bazel-gazelle/merger", - visibility = ["//visibility:public"], - deps = ["//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/fix.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/fix.go deleted file mode 100644 index ed59fbe2f1..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/fix.go +++ /dev/null @@ -1,199 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package merger - -import ( - "fmt" - "strings" - - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// FixLoads removes loads of unused go rules and adds loads of newly used rules. -// This should be called after FixFile and MergeFile, since symbols -// may be introduced that aren't loaded. -// -// This function calls File.Sync before processing loads. -func FixLoads(f *rule.File, knownLoads []rule.LoadInfo) { - knownFiles := make(map[string]bool) - knownKinds := make(map[string]string) - for _, l := range knownLoads { - knownFiles[l.Name] = true - for _, k := range l.Symbols { - knownKinds[k] = l.Name - } - } - - // Sync the file. We need File.Loads and File.Rules to contain inserted - // statements and not deleted statements. - f.Sync() - - // Scan load statements in the file. Keep track of loads of known files, - // since these may be changed. Keep track of symbols loaded from unknown - // files; we will not add loads for these. - var loads []*rule.Load - otherLoadedKinds := make(map[string]bool) - for _, l := range f.Loads { - if knownFiles[l.Name()] { - loads = append(loads, l) - continue - } - for _, sym := range l.Symbols() { - otherLoadedKinds[sym] = true - } - } - - // Make a map of all the symbols from known files used in this file. - usedKinds := make(map[string]map[string]bool) - for _, r := range f.Rules { - kind := r.Kind() - if file, ok := knownKinds[kind]; ok && !otherLoadedKinds[kind] { - if usedKinds[file] == nil { - usedKinds[file] = make(map[string]bool) - } - usedKinds[file][kind] = true - } - } - - // Fix the load statements. The order is important, so we iterate over - // knownLoads instead of knownFiles. - for _, known := range knownLoads { - file := known.Name - first := true - for _, l := range loads { - if l.Name() != file { - continue - } - if first { - fixLoad(l, file, usedKinds[file], knownKinds) - first = false - } else { - fixLoad(l, file, nil, knownKinds) - } - if l.IsEmpty() { - l.Delete() - } - } - if first { - load := fixLoad(nil, file, usedKinds[file], knownKinds) - if load != nil { - index := newLoadIndex(f, known.After) - load.Insert(f, index) - } - } - } -} - -// fixLoad updates a load statement with the given symbols. If load is nil, -// a new load may be created and returned. Symbols in kinds will be added -// to the load if they're not already present. Known symbols not in kinds -// will be removed if present. Other symbols will be preserved. If load is -// empty, nil is returned. -func fixLoad(load *rule.Load, file string, kinds map[string]bool, knownKinds map[string]string) *rule.Load { - if load == nil { - if len(kinds) == 0 { - return nil - } - load = rule.NewLoad(file) - } - - for k := range kinds { - load.Add(k) - } - for _, k := range load.Symbols() { - if knownKinds[k] != "" && !kinds[k] { - load.Remove(k) - } - } - return load -} - -// newLoadIndex returns the index in stmts where a new load statement should -// be inserted. after is a list of function names that the load should not -// be inserted before. 
-func newLoadIndex(f *rule.File, after []string) int { - if len(after) == 0 { - return 0 - } - index := 0 - for _, r := range f.Rules { - for _, a := range after { - if r.Kind() == a && r.Index() >= index { - index = r.Index() + 1 - } - } - } - return index -} - -// FixWorkspace updates rules in the WORKSPACE file f that were used with an -// older version of rules_go or gazelle. -func FixWorkspace(f *rule.File) { - removeLegacyGoRepository(f) -} - -// CheckGazelleLoaded searches the given WORKSPACE file for a repository named -// "bazel_gazelle". If no such repository is found *and* the repo is not -// declared with a directive *and* at least one load statement mentions -// the repository, a descriptive error will be returned. -// -// This should be called after modifications have been made to WORKSPACE -// (i.e., after FixLoads) before writing it to disk. -func CheckGazelleLoaded(f *rule.File) error { - needGazelle := false - for _, l := range f.Loads { - if strings.HasPrefix(l.Name(), "@bazel_gazelle//") { - needGazelle = true - } - } - if !needGazelle { - return nil - } - for _, r := range f.Rules { - if r.Name() == "bazel_gazelle" { - return nil - } - } - for _, d := range f.Directives { - if d.Key != "repo" { - continue - } - if fs := strings.Fields(d.Value); len(fs) > 0 && fs[0] == "bazel_gazelle" { - return nil - } - } - return fmt.Errorf(`%s: error: bazel_gazelle is not declared in WORKSPACE. -Without this repository, Gazelle cannot safely modify the WORKSPACE file. -See the instructions at https://github.com/bazelbuild/bazel-gazelle. -If the bazel_gazelle is declared inside a macro, you can suppress this error -by adding a comment like this to WORKSPACE: - # gazelle:repo bazel_gazelle -`, f.Path) -} - -// removeLegacyGoRepository removes loads of go_repository from -// @io_bazel_rules_go. FixLoads should be called after this; it will load from -// @bazel_gazelle. -func removeLegacyGoRepository(f *rule.File) { - for _, l := range f.Loads { - if l.Name() == "@io_bazel_rules_go//go:def.bzl" { - l.Remove("go_repository") - if l.IsEmpty() { - l.Delete() - } - } - } -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/merger.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/merger.go deleted file mode 100644 index e658f46f33..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/merger/merger.go +++ /dev/null @@ -1,250 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package merger provides functions for merging generated rules into -// existing build files. -// -// Gazelle's normal workflow is roughly as follows: -// -// 1. Read metadata from sources. -// -// 2. Generate new rules. -// -// 3. Merge newly generated rules with rules in the existing build file -// if there is one. -// -// 4. Build an index of merged library rules for dependency resolution. -// -// 5. Resolve dependencies (i.e., convert import strings to deps labels). -// -// 6. 
Merge the newly resolved dependencies. -// -// 7. Write the merged file back to disk. -// -// This package is used for sets 3 and 6 above. -package merger - -import ( - "fmt" - "strings" - - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// Phase indicates which attributes should be merged in matching rules. -type Phase int - -const ( - // The pre-resolve merge is performed before rules are indexed for dependency - // resolution. All attributes not related to dependencies are merged - // (i.e., rule.KindInfo.MergeableAttrs). This merge must be performed - // before indexing because attributes related to indexing (e.g., - // srcs, importpath) will be affected. - PreResolve Phase = iota - - // The post-resolve merge is performed after rules are indexed. All attributes - // related to dependencies are merged (i.e., rule.KindInfo.ResolveAttrs). - PostResolve -) - -// MergeFile combines information from newly generated rules with matching -// rules in an existing build file. MergeFile can also delete rules which -// are empty after merging. -// -// oldFile is the file to merge. It must not be nil. -// -// emptyRules is a list of stub rules (with no attributes other than name) -// which were not generated. These are merged with matching rules. The merged -// rules are deleted if they contain no attributes that make them buildable -// (e.g., srcs, deps, anything in rule.KindInfo.NonEmptyAttrs). -// -// genRules is a list of newly generated rules. These are merged with -// matching rules. A rule matches if it has the same kind and name or if -// some other attribute in rule.KindInfo.MatchAttrs matches (e.g., -// "importpath" in go_library). Elements of genRules that don't match -// any existing rule are appended to the end of oldFile. -// -// phase indicates whether this is a pre- or post-resolve merge. Different -// attributes (rule.KindInfo.MergeableAttrs or ResolveAttrs) will be merged. -// -// kinds maps rule kinds (e.g., "go_library") to metadata that helps merge -// rules of that kind. -// -// When a generated and existing rule are merged, each attribute is merged -// separately. If an attribute is mergeable (according to KindInfo), values -// from the existing attribute are replaced by values from the generated -// attribute. Comments are preserved on values that are present in both -// versions of the attribute. If at attribute is not mergeable, the generated -// version of the attribute will be added if no existing attribute is present; -// otherwise, the existing attribute will be preserved. -// -// Note that "# keep" comments affect merging. If a value within an existing -// attribute is marked with a "# keep" comment, it will not be removed. -// If an attribute is marked with a "# keep" comment, it will not be merged. -// If a rule is marked with a "# keep" comment, the whole rule will not -// be modified. -func MergeFile(oldFile *rule.File, emptyRules, genRules []*rule.Rule, phase Phase, kinds map[string]rule.KindInfo) { - getMergeAttrs := func(r *rule.Rule) map[string]bool { - if phase == PreResolve { - return kinds[r.Kind()].MergeableAttrs - } else { - return kinds[r.Kind()].ResolveAttrs - } - } - - // Merge empty rules into the file and delete any rules which become empty. 
- for _, emptyRule := range emptyRules { - if oldRule, _ := match(oldFile.Rules, emptyRule, kinds[emptyRule.Kind()]); oldRule != nil { - if oldRule.ShouldKeep() { - continue - } - rule.MergeRules(emptyRule, oldRule, getMergeAttrs(emptyRule), oldFile.Path) - if oldRule.IsEmpty(kinds[oldRule.Kind()]) { - oldRule.Delete() - } - } - } - oldFile.Sync() - - // Match generated rules with existing rules in the file. Keep track of - // rules with non-standard names. - matchRules := make([]*rule.Rule, len(genRules)) - matchErrors := make([]error, len(genRules)) - substitutions := make(map[string]string) - for i, genRule := range genRules { - oldRule, err := match(oldFile.Rules, genRule, kinds[genRule.Kind()]) - if err != nil { - // TODO(jayconrod): add a verbose mode and log errors. They are too chatty - // to print by default. - matchErrors[i] = err - continue - } - matchRules[i] = oldRule - if oldRule != nil { - if oldRule.Name() != genRule.Name() { - substitutions[genRule.Name()] = oldRule.Name() - } - } - } - - // Rename labels in generated rules that refer to other generated rules. - if len(substitutions) > 0 { - for _, genRule := range genRules { - substituteRule(genRule, substitutions, kinds[genRule.Kind()]) - } - } - - // Merge generated rules with existing rules or append to the end of the file. - for i, genRule := range genRules { - if matchErrors[i] != nil { - continue - } - if matchRules[i] == nil { - genRule.Insert(oldFile) - } else { - rule.MergeRules(genRule, matchRules[i], getMergeAttrs(genRule), oldFile.Path) - } - } -} - -// substituteRule replaces local labels (those beginning with ":", referring to -// targets in the same package) according to a substitution map. This is used -// to update generated rules before merging when the corresponding existing -// rules have different names. If substituteRule replaces a string, it returns -// a new expression; it will not modify the original expression. -func substituteRule(r *rule.Rule, substitutions map[string]string, info rule.KindInfo) { - for attr := range info.SubstituteAttrs { - if expr := r.Attr(attr); expr != nil { - expr = rule.MapExprStrings(expr, func(s string) string { - if rename, ok := substitutions[strings.TrimPrefix(s, ":")]; ok { - return ":" + rename - } else { - return s - } - }) - r.SetAttr(attr, expr) - } - } -} - -// match searches for a rule that can be merged with x in rules. -// -// A rule is considered a match if its kind is equal to x's kind AND either its -// name is equal OR at least one of the attributes in matchAttrs is equal. -// -// If there are no matches, nil and nil are returned. -// -// If a rule has the same name but a different kind, nill and an error -// are returned. -// -// If there is exactly one match, the rule and nil are returned. -// -// If there are multiple matches, match will attempt to disambiguate, based on -// the quality of the match (name match is best, then attribute match in the -// order that attributes are listed). If disambiguation is successful, -// the rule and nil are returned. Otherwise, nil and an error are returned. 
-func match(rules []*rule.Rule, x *rule.Rule, info rule.KindInfo) (*rule.Rule, error) { - xname := x.Name() - xkind := x.Kind() - var nameMatches []*rule.Rule - var kindMatches []*rule.Rule - for _, y := range rules { - if xname == y.Name() { - nameMatches = append(nameMatches, y) - } - if xkind == y.Kind() { - kindMatches = append(kindMatches, y) - } - } - - if len(nameMatches) == 1 { - y := nameMatches[0] - if xkind != y.Kind() { - return nil, fmt.Errorf("could not merge %s(%s): a rule of the same name has kind %s", xkind, xname, y.Kind()) - } - return y, nil - } - if len(nameMatches) > 1 { - return nil, fmt.Errorf("could not merge %s(%s): multiple rules have the same name", xkind, xname) - } - - for _, key := range info.MatchAttrs { - var attrMatches []*rule.Rule - xvalue := x.AttrString(key) - if xvalue == "" { - continue - } - for _, y := range kindMatches { - if xvalue == y.AttrString(key) { - attrMatches = append(attrMatches, y) - } - } - if len(attrMatches) == 1 { - return attrMatches[0], nil - } else if len(attrMatches) > 1 { - return nil, fmt.Errorf("could not merge %s(%s): multiple rules have the same attribute %s = %q", xkind, xname, key, xvalue) - } - } - - if info.MatchAny { - if len(kindMatches) == 1 { - return kindMatches[0], nil - } else if len(kindMatches) > 1 { - return nil, fmt.Errorf("could not merge %s(%s): multiple rules have the same kind but different names", xkind, xname) - } - } - - return nil, nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/BUILD.bazel deleted file mode 100644 index b351e0bfa5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/BUILD.bazel +++ /dev/null @@ -1,9 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["path.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools", - importpath = "github.com/bazelbuild/bazel-gazelle/pathtools", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/path.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/path.go deleted file mode 100644 index c9c2c75b25..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/pathtools/path.go +++ /dev/null @@ -1,67 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package pathtools provides utilities for manipulating paths. Most paths -// within Gazelle are slash-separated paths, relative to the repository root -// directory. The repository root directory is represented by the empty -// string. Paths in this format may be used directly as package names in labels. -package pathtools - -import ( - "path" - "path/filepath" - "strings" -) - -// HasPrefix returns whether the slash-separated path p has the given -// prefix. 
Unlike strings.HasPrefix, this function respects component -// boundaries, so "/home/foo" is not a prefix is "/home/foobar/baz". If the -// prefix is empty, this function always returns true. -func HasPrefix(p, prefix string) bool { - return prefix == "" || p == prefix || strings.HasPrefix(p, prefix+"/") -} - -// TrimPrefix returns p without the provided prefix. If p doesn't start -// with prefix, it returns p unchanged. Unlike strings.HasPrefix, this function -// respects component boundaries (assuming slash-separated paths), so -// TrimPrefix("foo/bar", "foo") returns "baz". -func TrimPrefix(p, prefix string) string { - if prefix == "" { - return p - } - if prefix == p { - return "" - } - return strings.TrimPrefix(p, prefix+"/") -} - -// RelBaseName returns the base name for rel, a slash-separated path relative -// to the repository root. If rel is empty, RelBaseName returns the base name -// of prefix. If prefix is empty, RelBaseName returns the base name of root, -// the absolute file path of the repository root directory. If that's empty -// to, then RelBaseName returns "root". -func RelBaseName(rel, prefix, root string) string { - base := path.Base(rel) - if base == "." || base == "/" { - base = path.Base(prefix) - } - if base == "." || base == "/" { - base = filepath.Base(root) - } - if base == "." || base == "/" { - base = "root" - } - return base -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/BUILD.bazel deleted file mode 100644 index 64a8f9744f..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/BUILD.bazel +++ /dev/null @@ -1,21 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "dep.go", - "modules.go", - "remote.go", - "repo.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo", - importpath = "github.com/bazelbuild/bazel-gazelle/repo", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/pathtools:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - "//vendor/github.com/pelletier/go-toml:go_default_library", - "//vendor/golang.org/x/tools/go/vcs:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/dep.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/dep.go deleted file mode 100644 index 8f55fc2156..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/dep.go +++ /dev/null @@ -1,55 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package repo - -import ( - "io/ioutil" - - "github.com/bazelbuild/bazel-gazelle/label" - toml "github.com/pelletier/go-toml" -) - -type depLockFile struct { - Projects []depProject `toml:"projects"` -} - -type depProject struct { - Name string `toml:"name"` - Revision string `toml:"revision"` - Source string `toml:"source"` -} - -func importRepoRulesDep(filename string) ([]Repo, error) { - data, err := ioutil.ReadFile(filename) - if err != nil { - return nil, err - } - var file depLockFile - if err := toml.Unmarshal(data, &file); err != nil { - return nil, err - } - - var repos []Repo - for _, p := range file.Projects { - repos = append(repos, Repo{ - Name: label.ImportPathToBazelRepoName(p.Name), - GoPrefix: p.Name, - Commit: p.Revision, - Remote: p.Source, - }) - } - return repos, nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/modules.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/modules.go deleted file mode 100644 index 6811d1d7b2..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/modules.go +++ /dev/null @@ -1,145 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package repo - -import ( - "bytes" - "encoding/json" - "io" - "io/ioutil" - "os" - "os/exec" - "path/filepath" - "regexp" - "runtime" - "strings" - - "github.com/bazelbuild/bazel-gazelle/label" -) - -type module struct { - Path, Version string - Main bool -} - -var regexMixedVersioning = regexp.MustCompile(`^(.*?)-([0-9]{14})-([a-fA-F0-9]{12})$`) - -func toRepoRule(mod module) Repo { - var tag, commit string - - if gr := regexMixedVersioning.FindStringSubmatch(mod.Version); gr != nil { - commit = gr[3] - } else { - tag = strings.TrimSuffix(mod.Version, "+incompatible") - } - - return Repo{ - Name: label.ImportPathToBazelRepoName(mod.Path), - GoPrefix: mod.Path, - Commit: commit, - Tag: tag, - } -} - -func importRepoRulesModules(filename string) (repos []Repo, err error) { - tempDir, err := copyGoModToTemp(filename) - if err != nil { - return nil, err - } - defer os.RemoveAll(tempDir) - - data, err := goListModulesFn(tempDir) - if err != nil { - return nil, err - } - - dec := json.NewDecoder(bytes.NewReader(data)) - for dec.More() { - var mod module - if err := dec.Decode(&mod); err != nil { - return nil, err - } - if mod.Main { - continue - } - - repos = append(repos, toRepoRule(mod)) - } - - return repos, nil -} - -// goListModulesFn may be overridden by tests. -var goListModulesFn = goListModules - -// goListModules invokes "go list" in a directory containing a go.mod file. -func goListModules(dir string) ([]byte, error) { - goTool := findGoTool() - cmd := exec.Command(goTool, "list", "-m", "-json", "all") - cmd.Stderr = os.Stderr - cmd.Dir = dir - data, err := cmd.Output() - return data, err -} - -// copyGoModToTemp copies to given go.mod file to a temporary directory. -// go list tends to mutate go.mod files, but gazelle shouldn't do that. 
-func copyGoModToTemp(filename string) (tempDir string, err error) { - goModOrig, err := os.Open(filename) - if err != nil { - return "", err - } - defer goModOrig.Close() - - tempDir, err = ioutil.TempDir("", "gazelle-temp-gomod") - if err != nil { - return "", err - } - - goModCopy, err := os.Create(filepath.Join(tempDir, "go.mod")) - if err != nil { - os.Remove(tempDir) - return "", err - } - defer func() { - if cerr := goModCopy.Close(); err == nil && cerr != nil { - err = cerr - } - }() - - _, err = io.Copy(goModCopy, goModOrig) - if err != nil { - os.RemoveAll(tempDir) - return "", err - } - return tempDir, err -} - -// findGoTool attempts to locate the go executable. If GOROOT is set, we'll -// prefer the one in there; otherwise, we'll rely on PATH. If the wrapper -// script generated by the gazelle rule is invoked by Bazel, it will set -// GOROOT to the configured SDK. We don't want to rely on the host SDK in -// that situation. -func findGoTool() string { - path := "go" // rely on PATH by default - if goroot, ok := os.LookupEnv("GOROOT"); ok { - path = filepath.Join(goroot, "bin", "go") - } - if runtime.GOOS == "windows" { - path += ".exe" - } - return path -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/remote.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/remote.go deleted file mode 100644 index da5442ec71..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/remote.go +++ /dev/null @@ -1,332 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package repo - -import ( - "bytes" - "fmt" - "os/exec" - "path" - "regexp" - "strings" - "sync" - - "github.com/bazelbuild/bazel-gazelle/label" - "github.com/bazelbuild/bazel-gazelle/pathtools" - "golang.org/x/tools/go/vcs" -) - -// UpdateRepo returns an object describing a repository at the most recent -// commit or version tag. -// -// This function uses RemoteCache to retrieve information about the repository. -// Depending on how the RemoteCache was initialized and used earlier, some -// information may already be locally available. Frequently though, information -// will be fetched over the network, so this function may be slow. -func UpdateRepo(rc *RemoteCache, importPath string) (Repo, error) { - root, name, err := rc.Root(importPath) - if err != nil { - return Repo{}, err - } - remote, vcs, err := rc.Remote(root) - if err != nil { - return Repo{}, err - } - commit, tag, err := rc.Head(remote, vcs) - if err != nil { - return Repo{}, err - } - repo := Repo{ - Name: name, - GoPrefix: root, - Commit: commit, - Tag: tag, - Remote: remote, - VCS: vcs, - } - return repo, nil -} - -// RemoteCache stores information about external repositories. The cache may -// be initialized with information about known repositories, i.e., those listed -// in the WORKSPACE file and mentioned on the command line. Other information -// is retrieved over the network. 
-// -// Public methods of RemoteCache may be slow in cases where a network fetch -// is needed. Public methods may be called concurrently. -type RemoteCache struct { - // RepoRootForImportPath is vcs.RepoRootForImportPath by default. It may - // be overridden so that tests may avoid accessing the network. - RepoRootForImportPath func(string, bool) (*vcs.RepoRoot, error) - - // HeadCmd returns the latest commit on the default branch in the given - // repository. This is used by Head. It may be stubbed out for tests. - HeadCmd func(remote, vcs string) (string, error) - - root, remote, head remoteCacheMap -} - -// remoteCacheMap is a thread-safe, idempotent cache. It is used to store -// information which should be fetched over the network no more than once. -// This follows the Memo pattern described in The Go Programming Language, -// section 9.7. -type remoteCacheMap struct { - mu sync.Mutex - cache map[string]*remoteCacheEntry -} - -type remoteCacheEntry struct { - value interface{} - err error - - // ready is nil for entries that were added when the cache was initialized. - // It is non-nil for other entries. It is closed when an entry is ready, - // i.e., the operation loading the entry completed. - ready chan struct{} -} - -type rootValue struct { - root, name string -} - -type remoteValue struct { - remote, vcs string -} - -type headValue struct { - commit, tag string -} - -// NewRemoteCache creates a new RemoteCache with a set of known repositories. -// The Root and Remote methods will return information about repositories listed -// here without accessing the network. However, the Head method will still -// access the network for these repositories to retrieve information about new -// versions. -func NewRemoteCache(knownRepos []Repo) *RemoteCache { - r := &RemoteCache{ - RepoRootForImportPath: vcs.RepoRootForImportPath, - HeadCmd: defaultHeadCmd, - root: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)}, - remote: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)}, - head: remoteCacheMap{cache: make(map[string]*remoteCacheEntry)}, - } - for _, repo := range knownRepos { - r.root.cache[repo.GoPrefix] = &remoteCacheEntry{ - value: rootValue{ - root: repo.GoPrefix, - name: repo.Name, - }, - } - if repo.Remote != "" { - r.remote.cache[repo.GoPrefix] = &remoteCacheEntry{ - value: remoteValue{ - remote: repo.Remote, - vcs: repo.VCS, - }, - } - } - } - return r -} - -var gopkginPattern = regexp.MustCompile("^(gopkg.in/(?:[^/]+/)?[^/]+\\.v\\d+)(?:/|$)") - -var knownPrefixes = []struct { - prefix string - missing int -}{ - {prefix: "golang.org/x", missing: 1}, - {prefix: "google.golang.org", missing: 1}, - {prefix: "cloud.google.com", missing: 1}, - {prefix: "github.com", missing: 2}, -} - -// Root returns the portion of an import path that corresponds to the root -// directory of the repository containing the given import path. For example, -// given "golang.org/x/tools/go/loader", this will return "golang.org/x/tools". -// The workspace name of the repository is also returned. This may be a custom -// name set in WORKSPACE, or it may be a generated name based on the root path. -func (r *RemoteCache) Root(importPath string) (root, name string, err error) { - // Try prefixes of the import path in the cache, but don't actually go out - // to vcs yet. We do this before handling known special cases because - // the cache is pre-populated with repository rules, and we want to use their - // names if we can. 
- prefix := importPath - for { - v, ok, err := r.root.get(prefix) - if ok { - if err != nil { - return "", "", err - } - value := v.(rootValue) - return value.root, value.name, nil - } - - prefix = path.Dir(prefix) - if prefix == "." || prefix == "/" { - break - } - } - - // Try known prefixes. - for _, p := range knownPrefixes { - if pathtools.HasPrefix(importPath, p.prefix) { - rest := pathtools.TrimPrefix(importPath, p.prefix) - var components []string - if rest != "" { - components = strings.Split(rest, "/") - } - if len(components) < p.missing { - return "", "", fmt.Errorf("import path %q is shorter than the known prefix %q", importPath, p.prefix) - } - root = p.prefix - for _, c := range components[:p.missing] { - root = path.Join(root, c) - } - name = label.ImportPathToBazelRepoName(root) - return root, name, nil - } - } - - // gopkg.in is special, and might have either one or two levels of - // missing paths. See http://labix.org/gopkg.in for URL patterns. - if match := gopkginPattern.FindStringSubmatch(importPath); len(match) > 0 { - root = match[1] - name = label.ImportPathToBazelRepoName(root) - return root, name, nil - } - - // Find the prefix using vcs and cache the result. - v, err := r.root.ensure(importPath, func() (interface{}, error) { - res, err := r.RepoRootForImportPath(importPath, false) - if err != nil { - return nil, err - } - return rootValue{res.Root, label.ImportPathToBazelRepoName(res.Root)}, nil - }) - if err != nil { - return "", "", err - } - value := v.(rootValue) - return value.root, value.name, nil -} - -// Remote returns the VCS name and the remote URL for a repository with the -// given root import path. This is suitable for creating new repository rules. -func (r *RemoteCache) Remote(root string) (remote, vcs string, err error) { - v, err := r.remote.ensure(root, func() (interface{}, error) { - repo, err := r.RepoRootForImportPath(root, false) - if err != nil { - return nil, err - } - return remoteValue{remote: repo.Repo, vcs: repo.VCS.Cmd}, nil - }) - if err != nil { - return "", "", err - } - value := v.(remoteValue) - return value.remote, value.vcs, nil -} - -// Head returns the most recent commit id on the default branch and latest -// version tag for the given remote repository. The tag "" is returned if -// no latest version was found. -// -// TODO(jayconrod): support VCS other than git. -// TODO(jayconrod): support version tags. "" is always returned. -func (r *RemoteCache) Head(remote, vcs string) (commit, tag string, err error) { - if vcs != "git" { - return "", "", fmt.Errorf("could not locate recent commit in repo %q with unknown version control scheme %q", remote, vcs) - } - - v, err := r.head.ensure(remote, func() (interface{}, error) { - commit, err := r.HeadCmd(remote, vcs) - if err != nil { - return nil, err - } - return headValue{commit: commit}, nil - }) - if err != nil { - return "", "", err - } - value := v.(headValue) - return value.commit, value.tag, nil -} - -func defaultHeadCmd(remote, vcs string) (string, error) { - switch vcs { - case "local": - return "", nil - - case "git": - // Old versions of git ls-remote exit with code 129 when "--" is passed. - // We'll try to validate the argument here instead. 
- if strings.HasPrefix(remote, "-") { - return "", fmt.Errorf("remote must not start with '-': %q", remote) - } - cmd := exec.Command("git", "ls-remote", remote, "HEAD") - out, err := cmd.Output() - if err != nil { - return "", err - } - ix := bytes.IndexByte(out, '\t') - if ix < 0 { - return "", fmt.Errorf("could not parse output for git ls-remote for %q", remote) - } - return string(out[:ix]), nil - - default: - return "", fmt.Errorf("unknown version control system: %s", vcs) - } -} - -// get retrieves a value associated with the given key from the cache. ok will -// be true if the key exists in the cache, even if it's in the process of -// being fetched. -func (m *remoteCacheMap) get(key string) (value interface{}, ok bool, err error) { - m.mu.Lock() - e, ok := m.cache[key] - m.mu.Unlock() - if !ok { - return nil, ok, nil - } - if e.ready != nil { - <-e.ready - } - return e.value, ok, e.err -} - -// ensure retreives a value associated with the given key from the cache. If -// the key does not exist in the cache, the load function will be called, -// and its result will be associated with the key. The load function will not -// be called more than once for any key. -func (m *remoteCacheMap) ensure(key string, load func() (interface{}, error)) (interface{}, error) { - m.mu.Lock() - e, ok := m.cache[key] - if !ok { - e = &remoteCacheEntry{ready: make(chan struct{})} - m.cache[key] = e - m.mu.Unlock() - e.value, e.err = load() - close(e.ready) - } else { - m.mu.Unlock() - if e.ready != nil { - <-e.ready - } - } - return e.value, e.err -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/repo.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/repo.go deleted file mode 100644 index 3ccb62248c..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/repo/repo.go +++ /dev/null @@ -1,199 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package repo - -import ( - "fmt" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// Repo describes an external repository rule declared in a Bazel -// WORKSPACE file. -type Repo struct { - // Name is the value of the "name" attribute of the repository rule. - Name string - - // GoPrefix is the portion of the Go import path for the root of this - // repository. Usually the same as Remote. - GoPrefix string - - // Commit is the revision at which a repository is checked out (for example, - // a Git commit id). - Commit string - - // Tag is the name of the version at which a repository is checked out. - Tag string - - // Remote is the URL the repository can be cloned or checked out from. - Remote string - - // VCS is the version control system used to check out the repository. - // May also be "http" for HTTP archives. 
- VCS string -} - -type byName []Repo - -func (s byName) Len() int { return len(s) } -func (s byName) Less(i, j int) bool { return s[i].Name < s[j].Name } -func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -type lockFileFormat int - -const ( - unknownFormat lockFileFormat = iota - depFormat - moduleFormat -) - -var lockFileParsers = map[lockFileFormat]func(string) ([]Repo, error){ - depFormat: importRepoRulesDep, - moduleFormat: importRepoRulesModules, -} - -// ImportRepoRules reads the lock file of a vendoring tool and returns -// a list of equivalent repository rules that can be merged into a WORKSPACE -// file. The format of the file is inferred from its basename. Currently, -// only Gopkg.lock is supported. -func ImportRepoRules(filename string) ([]*rule.Rule, error) { - format := getLockFileFormat(filename) - if format == unknownFormat { - return nil, fmt.Errorf(`%s: unrecognized lock file format. Expected "Gopkg.lock"`, filename) - } - parser := lockFileParsers[format] - repos, err := parser(filename) - if err != nil { - return nil, fmt.Errorf("error parsing %q: %v", filename, err) - } - sort.Stable(byName(repos)) - - rules := make([]*rule.Rule, 0, len(repos)) - for _, repo := range repos { - rules = append(rules, GenerateRule(repo)) - } - return rules, nil -} - -func getLockFileFormat(filename string) lockFileFormat { - switch filepath.Base(filename) { - case "Gopkg.lock": - return depFormat - case "go.mod": - return moduleFormat - default: - return unknownFormat - } -} - -// GenerateRule returns a repository rule for the given repository that can -// be written in a WORKSPACE file. -func GenerateRule(repo Repo) *rule.Rule { - r := rule.NewRule("go_repository", repo.Name) - if repo.Commit != "" { - r.SetAttr("commit", repo.Commit) - } - if repo.Tag != "" { - r.SetAttr("tag", repo.Tag) - } - r.SetAttr("importpath", repo.GoPrefix) - if repo.Remote != "" { - r.SetAttr("remote", repo.Remote) - } - if repo.VCS != "" { - r.SetAttr("vcs", repo.VCS) - } - return r -} - -// FindExternalRepo attempts to locate the directory where Bazel has fetched -// the external repository with the given name. An error is returned if the -// repository directory cannot be located. -func FindExternalRepo(repoRoot, name string) (string, error) { - // See https://docs.bazel.build/versions/master/output_directories.html - // for documentation on Bazel directory layout. - // We expect the bazel-out symlink in the workspace root directory to point to - // /execroot//bazel-out - // We expect the external repository to be checked out at - // /external/ - // Note that users can change the prefix for most of the Bazel symlinks with - // --symlink_prefix, but this does not include bazel-out. - externalPath := strings.Join([]string{repoRoot, "bazel-out", "..", "..", "..", "external", name}, string(os.PathSeparator)) - cleanPath, err := filepath.EvalSymlinks(externalPath) - if err != nil { - return "", err - } - st, err := os.Stat(cleanPath) - if err != nil { - return "", err - } - if !st.IsDir() { - return "", fmt.Errorf("%s: not a directory", externalPath) - } - return cleanPath, nil -} - -// ListRepositories extracts metadata about repositories declared in a -// WORKSPACE file. -// -// The set of repositories returned is necessarily incomplete, since we don't -// evaluate the file, and repositories may be declared in macros in other files. 
-func ListRepositories(workspace *rule.File) []Repo { - var repos []Repo - for _, r := range workspace.Rules { - name := r.Name() - if name == "" { - continue - } - var repo Repo - switch r.Kind() { - case "go_repository": - // TODO(jayconrod): extract other fields needed by go_repository. - // Currently, we don't use the result of this function to produce new - // go_repository rules, so it doesn't matter. - goPrefix := r.AttrString("importpath") - revision := r.AttrString("commit") - remote := r.AttrString("remote") - vcs := r.AttrString("vcs") - if goPrefix == "" { - continue - } - repo = Repo{ - Name: name, - GoPrefix: goPrefix, - Commit: revision, - Remote: remote, - VCS: vcs, - } - - // TODO(jayconrod): infer from {new_,}git_repository, {new_,}http_archive, - // local_repository. - - default: - continue - } - repos = append(repos, repo) - } - - // TODO(jayconrod): look for directives that describe repositories that - // aren't declared in the top-level of WORKSPACE (e.g., behind a macro). - - return repos -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/BUILD.bazel deleted file mode 100644 index 372bdd9be2..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/BUILD.bazel +++ /dev/null @@ -1,18 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "config.go", - "index.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve", - importpath = "github.com/bazelbuild/bazel-gazelle/resolve", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/config:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/repo:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/config.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/config.go deleted file mode 100644 index 27d38955c5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/config.go +++ /dev/null @@ -1,115 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package resolve - -import ( - "flag" - "log" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/label" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// FindRuleWithOverride searches the current configuration for user-specified -// dependency resolution overrides. Overrides specified later (in configuration -// files in deeper directories, or closer to the end of the file) are -// returned first. If no override is found, label.NoLabel is returned. 
-func FindRuleWithOverride(c *config.Config, imp ImportSpec, lang string) (label.Label, bool) { - rc := getResolveConfig(c) - for i := len(rc.overrides) - 1; i >= 0; i-- { - o := rc.overrides[i] - if o.matches(imp, lang) { - return o.dep, true - } - } - return label.NoLabel, false -} - -type overrideSpec struct { - imp ImportSpec - lang string - dep label.Label -} - -func (o overrideSpec) matches(imp ImportSpec, lang string) bool { - return imp.Lang == o.imp.Lang && - imp.Imp == o.imp.Imp && - (o.lang == "" || o.lang == lang) -} - -type resolveConfig struct { - overrides []overrideSpec -} - -const resolveName = "_resolve" - -func getResolveConfig(c *config.Config) *resolveConfig { - return c.Exts[resolveName].(*resolveConfig) -} - -type Configurer struct{} - -func (_ *Configurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) { - c.Exts[resolveName] = &resolveConfig{} -} - -func (_ *Configurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error { return nil } - -func (_ *Configurer) KnownDirectives() []string { - return []string{"resolve"} -} - -func (_ *Configurer) Configure(c *config.Config, rel string, f *rule.File) { - rc := getResolveConfig(c) - rcCopy := &resolveConfig{ - overrides: rc.overrides[:], - } - - if f != nil { - for _, d := range f.Directives { - if d.Key == "resolve" { - parts := strings.Fields(d.Value) - o := overrideSpec{} - var lbl string - if len(parts) == 3 { - o.imp.Lang = parts[0] - o.imp.Imp = parts[1] - lbl = parts[2] - } else if len(parts) == 4 { - o.imp.Lang = parts[0] - o.lang = parts[1] - o.imp.Imp = parts[2] - lbl = parts[3] - } else { - log.Printf("could not parse directive: %s\n\texpected gazelle:resolve source-language [import-language] import-string label", d.Value) - continue - } - var err error - o.dep, err = label.Parse(lbl) - if err != nil { - log.Printf("gazelle:resolve %s: %v", d.Value, err) - continue - } - o.dep = o.dep.Abs("", rel) - rcCopy.overrides = append(rcCopy.overrides, o) - } - } - } - - c.Exts[resolveName] = rcCopy -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/index.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/index.go deleted file mode 100644 index 62af5c88b5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/resolve/index.go +++ /dev/null @@ -1,243 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package resolve - -import ( - "log" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/label" - "github.com/bazelbuild/bazel-gazelle/repo" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// ImportSpec describes a library to be imported. Imp is an import string for -// the library. Lang is the language in which the import string appears (this -// should match Resolver.Name). -type ImportSpec struct { - Lang, Imp string -} - -// Resolver is an interface that language extensions can implement to resolve -// dependencies in rules they generate. 
-type Resolver interface { - // Name returns the name of the language. This should be a prefix of the - // kinds of rules generated by the language, e.g., "go" for the Go extension - // since it generates "go_library" rules. - Name() string - - // Imports returns a list of ImportSpecs that can be used to import the rule - // r. This is used to populate RuleIndex. - // - // If nil is returned, the rule will not be indexed. If any non-nil slice is - // returned, including an empty slice, the rule will be indexed. - Imports(c *config.Config, r *rule.Rule, f *rule.File) []ImportSpec - - // Embeds returns a list of labels of rules that the given rule embeds. If - // a rule is embedded by another importable rule of the same language, only - // the embedding rule will be indexed. The embedding rule will inherit - // the imports of the embedded rule. - Embeds(r *rule.Rule, from label.Label) []label.Label - - // Resolve translates imported libraries for a given rule into Bazel - // dependencies. Information about imported libraries is returned for each - // rule generated by language.GenerateRules in - // language.GenerateResult.Imports. Resolve generates a "deps" attribute (or - // the appropriate language-specific equivalent) for each import according to - // language-specific rules and heuristics. - Resolve(c *config.Config, ix *RuleIndex, rc *repo.RemoteCache, r *rule.Rule, imports interface{}, from label.Label) -} - -// RuleIndex is a table of rules in a workspace, indexed by label and by -// import path. Used by Resolver to map import paths to labels. -type RuleIndex struct { - rules []*ruleRecord - labelMap map[label.Label]*ruleRecord - importMap map[ImportSpec][]*ruleRecord - kindToResolver map[string]Resolver -} - -// ruleRecord contains information about a rule relevant to import indexing. -type ruleRecord struct { - rule *rule.Rule - label label.Label - - // importedAs is a list of ImportSpecs by which this rule may be imported. - // Used to build a map from ImportSpecs to ruleRecords. - importedAs []ImportSpec - - // embeds is the transitive closure of labels for rules that this rule embeds - // (as determined by the Embeds method). This only includes rules in the same - // language (i.e., it includes a go_library embedding a go_proto_library, but - // not a go_proto_library embedding a proto_library). - embeds []label.Label - - // embedded indicates whether another rule of the same language embeds this - // rule. Embedded rules should not be indexed. - embedded bool - - didCollectEmbeds bool -} - -// NewRuleIndex creates a new index. -// -// kindToResolver is a map from rule kinds (for example, "go_library") to -// Resolvers that support those kinds. -func NewRuleIndex(kindToResolver map[string]Resolver) *RuleIndex { - return &RuleIndex{ - labelMap: make(map[label.Label]*ruleRecord), - kindToResolver: kindToResolver, - } -} - -// AddRule adds a rule r to the index. The rule will only be indexed if there -// is a known resolver for the rule's kind and Resolver.Imports returns a -// non-nil slice. -// -// AddRule may only be called before Finish. -func (ix *RuleIndex) AddRule(c *config.Config, r *rule.Rule, f *rule.File) { - var imps []ImportSpec - if rslv, ok := ix.kindToResolver[r.Kind()]; ok { - imps = rslv.Imports(c, r, f) - } - // If imps == nil, the rule is not importable. If imps is the empty slice, - // it may still be importable if it embeds importable libraries. 
- if imps == nil { - return - } - - record := &ruleRecord{ - rule: r, - label: label.New(c.RepoName, f.Pkg, r.Name()), - importedAs: imps, - } - if _, ok := ix.labelMap[record.label]; ok { - log.Printf("multiple rules found with label %s", record.label) - return - } - ix.rules = append(ix.rules, record) - ix.labelMap[record.label] = record -} - -// Finish constructs the import index and performs any other necessary indexing -// actions after all rules have been added. This step is necessary because -// a rule may be indexed differently based on what rules are added later. -// -// Finish must be called after all AddRule calls and before any -// FindRulesByImport calls. -func (ix *RuleIndex) Finish() { - for _, r := range ix.rules { - ix.collectEmbeds(r) - } - ix.buildImportIndex() -} - -func (ix *RuleIndex) collectEmbeds(r *ruleRecord) { - if r.didCollectEmbeds { - return - } - r.didCollectEmbeds = true - embedLabels := ix.kindToResolver[r.rule.Kind()].Embeds(r.rule, r.label) - r.embeds = embedLabels - for _, e := range embedLabels { - er, ok := ix.findRuleByLabel(e, r.label) - if !ok { - continue - } - ix.collectEmbeds(er) - if ix.kindToResolver[r.rule.Kind()] == ix.kindToResolver[er.rule.Kind()] { - er.embedded = true - r.embeds = append(r.embeds, er.embeds...) - } - r.importedAs = append(r.importedAs, er.importedAs...) - } -} - -// buildImportIndex constructs the map used by FindRulesByImport. -func (ix *RuleIndex) buildImportIndex() { - ix.importMap = make(map[ImportSpec][]*ruleRecord) - for _, r := range ix.rules { - if r.embedded { - continue - } - indexed := make(map[ImportSpec]bool) - for _, imp := range r.importedAs { - if indexed[imp] { - continue - } - indexed[imp] = true - ix.importMap[imp] = append(ix.importMap[imp], r) - } - } -} - -func (ix *RuleIndex) findRuleByLabel(label label.Label, from label.Label) (*ruleRecord, bool) { - label = label.Abs(from.Repo, from.Pkg) - r, ok := ix.labelMap[label] - return r, ok -} - -type FindResult struct { - // Label is the absolute label (including repository and package name) for - // a matched rule. - Label label.Label - - // Embeds is the transitive closure of labels for rules that the matched - // rule embeds. It may contains duplicates and does not include the label - // for the rule itself. - Embeds []label.Label -} - -// FindRulesByImport attempts to resolve an import string to a rule record. -// imp is the import to resolve (which includes the target language). lang is -// the language of the rule with the dependency (for example, in -// go_proto_library, imp will have ProtoLang and lang will be GoLang). -// from is the rule which is doing the dependency. This is used to check -// vendoring visibility and to check for self-imports. -// -// FindRulesByImport returns a list of rules, since any number of rules may -// provide the same import. Callers may need to resolve ambiguities using -// language-specific heuristics. -func (ix *RuleIndex) FindRulesByImport(imp ImportSpec, lang string) []FindResult { - matches := ix.importMap[imp] - results := make([]FindResult, 0, len(matches)) - for _, m := range matches { - if ix.kindToResolver[m.rule.Kind()].Name() != lang { - continue - } - results = append(results, FindResult{ - Label: m.label, - Embeds: m.embeds, - }) - } - return results -} - -// IsSelfImport returns true if the result's label matches the given label -// or the result's rule transitively embeds the rule with the given label. 
-// Self imports cause cyclic dependencies, so the caller may want to omit -// the dependency or report an error. -func (r FindResult) IsSelfImport(from label.Label) bool { - if from.Equal(r.Label) { - return true - } - for _, e := range r.Embeds { - if from.Equal(e) { - return true - } - } - return false -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/BUILD.bazel deleted file mode 100644 index 468d2504ec..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/BUILD.bazel +++ /dev/null @@ -1,24 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "directives.go", - "expr.go", - "merge.go", - "platform.go", - "platform_strings.go", - "rule.go", - "sort_labels.go", - "types.go", - "value.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule", - importpath = "github.com/bazelbuild/bazel-gazelle/rule", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/label:go_default_library", - "//vendor/github.com/bazelbuild/buildtools/build:go_default_library", - "//vendor/github.com/bazelbuild/buildtools/tables:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/directives.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/directives.go deleted file mode 100644 index 9c61ca65be..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/directives.go +++ /dev/null @@ -1,64 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "regexp" - - bzl "github.com/bazelbuild/buildtools/build" -) - -// Directive is a key-value pair extracted from a top-level comment in -// a build file. Directives have the following format: -// -// # gazelle:key value -// -// Keys may not contain spaces. Values may be empty and may contain spaces, -// but surrounding space is trimmed. -type Directive struct { - Key, Value string -} - -// TODO(jayconrod): annotation directives will apply to an individual rule. -// They must appear in the block of comments above that rule. - -// ParseDirectives scans f for Gazelle directives. The full list of directives -// is returned. Errors are reported for unrecognized directives and directives -// out of place (after the first statement). 
-func ParseDirectives(f *bzl.File) []Directive { - var directives []Directive - parseComment := func(com bzl.Comment) { - match := directiveRe.FindStringSubmatch(com.Token) - if match == nil { - return - } - key, value := match[1], match[2] - directives = append(directives, Directive{key, value}) - } - - for _, s := range f.Stmt { - coms := s.Comment() - for _, com := range coms.Before { - parseComment(com) - } - for _, com := range coms.After { - parseComment(com) - } - } - return directives -} - -var directiveRe = regexp.MustCompile(`^#\s*gazelle:(\w+)\s*(.*?)\s*$`) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/expr.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/expr.go deleted file mode 100644 index 89d6e186cb..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/expr.go +++ /dev/null @@ -1,354 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "fmt" - "log" - "strings" - - "github.com/bazelbuild/bazel-gazelle/label" - bzl "github.com/bazelbuild/buildtools/build" -) - -// MapExprStrings applies a function to string sub-expressions within e. -// An expression containing the results with the same structure as e is -// returned. 
-func MapExprStrings(e bzl.Expr, f func(string) string) bzl.Expr { - if e == nil { - return nil - } - switch expr := e.(type) { - case *bzl.StringExpr: - s := f(expr.Value) - if s == "" { - return nil - } - ret := *expr - ret.Value = s - return &ret - - case *bzl.ListExpr: - var list []bzl.Expr - for _, elem := range expr.List { - elem = MapExprStrings(elem, f) - if elem != nil { - list = append(list, elem) - } - } - if len(list) == 0 && len(expr.List) > 0 { - return nil - } - ret := *expr - ret.List = list - return &ret - - case *bzl.DictExpr: - var cases []bzl.Expr - isEmpty := true - for _, kv := range expr.List { - keyval, ok := kv.(*bzl.KeyValueExpr) - if !ok { - log.Panicf("unexpected expression in generated imports dict: %#v", kv) - } - value := MapExprStrings(keyval.Value, f) - if value != nil { - cases = append(cases, &bzl.KeyValueExpr{Key: keyval.Key, Value: value}) - if key, ok := keyval.Key.(*bzl.StringExpr); !ok || key.Value != "//conditions:default" { - isEmpty = false - } - } - } - if isEmpty { - return nil - } - ret := *expr - ret.List = cases - return &ret - - case *bzl.CallExpr: - if x, ok := expr.X.(*bzl.LiteralExpr); !ok || x.Token != "select" || len(expr.List) != 1 { - log.Panicf("unexpected call expression in generated imports: %#v", e) - } - arg := MapExprStrings(expr.List[0], f) - if arg == nil { - return nil - } - call := *expr - call.List[0] = arg - return &call - - case *bzl.BinaryExpr: - x := MapExprStrings(expr.X, f) - y := MapExprStrings(expr.Y, f) - if x == nil { - return y - } - if y == nil { - return x - } - binop := *expr - binop.X = x - binop.Y = y - return &binop - - default: - return nil - } -} - -// FlattenExpr takes an expression that may have been generated from -// PlatformStrings and returns its values in a flat, sorted, de-duplicated -// list. Comments are accumulated and de-duplicated across duplicate -// expressions. If the expression could not have been generted by -// PlatformStrings, the expression will be returned unmodified. 
-func FlattenExpr(e bzl.Expr) bzl.Expr { - ps, err := extractPlatformStringsExprs(e) - if err != nil { - return e - } - - ls := makeListSquasher() - addElem := func(e bzl.Expr) bool { - s, ok := e.(*bzl.StringExpr) - if !ok { - return false - } - ls.add(s) - return true - } - addList := func(e bzl.Expr) bool { - l, ok := e.(*bzl.ListExpr) - if !ok { - return false - } - for _, elem := range l.List { - if !addElem(elem) { - return false - } - } - return true - } - addDict := func(d *bzl.DictExpr) bool { - for _, kv := range d.List { - if !addList(kv.(*bzl.KeyValueExpr).Value) { - return false - } - } - return true - } - - if ps.generic != nil { - if !addList(ps.generic) { - return e - } - } - for _, d := range []*bzl.DictExpr{ps.os, ps.arch, ps.platform} { - if d == nil { - continue - } - if !addDict(d) { - return e - } - } - - return ls.list() -} - -func isScalar(e bzl.Expr) bool { - switch e.(type) { - case *bzl.StringExpr, *bzl.LiteralExpr: - return true - default: - return false - } -} - -func dictEntryKeyValue(e bzl.Expr) (string, *bzl.ListExpr, error) { - kv, ok := e.(*bzl.KeyValueExpr) - if !ok { - return "", nil, fmt.Errorf("dict entry was not a key-value pair: %#v", e) - } - k, ok := kv.Key.(*bzl.StringExpr) - if !ok { - return "", nil, fmt.Errorf("dict key was not string: %#v", kv.Key) - } - v, ok := kv.Value.(*bzl.ListExpr) - if !ok { - return "", nil, fmt.Errorf("dict value was not list: %#v", kv.Value) - } - return k.Value, v, nil -} - -func stringValue(e bzl.Expr) string { - s, ok := e.(*bzl.StringExpr) - if !ok { - return "" - } - return s.Value -} - -// platformStringsExprs is a set of sub-expressions that match the structure -// of package.PlatformStrings. ExprFromValue produces expressions that -// follow this structure for srcs, deps, and other attributes, so this matches -// all non-scalar expressions generated by Gazelle. -// -// The matched expression has the form: -// -// [] + select({}) + select({}) + select({}) -// -// The four collections may appear in any order, and some or all of them may -// be omitted (all fields are nil for a nil expression). -type platformStringsExprs struct { - generic *bzl.ListExpr - os, arch, platform *bzl.DictExpr -} - -// extractPlatformStringsExprs matches an expression and attempts to extract -// sub-expressions in platformStringsExprs. The sub-expressions can then be -// merged with corresponding sub-expressions. Any field in the returned -// structure may be nil. An error is returned if the given expression does -// not follow the pattern described by platformStringsExprs. -func extractPlatformStringsExprs(expr bzl.Expr) (platformStringsExprs, error) { - var ps platformStringsExprs - if expr == nil { - return ps, nil - } - - // Break the expression into a sequence of expressions combined with +. - var parts []bzl.Expr - for { - binop, ok := expr.(*bzl.BinaryExpr) - if !ok { - parts = append(parts, expr) - break - } - parts = append(parts, binop.Y) - expr = binop.X - } - - // Process each part. They may be in any order. 
- for _, part := range parts { - switch part := part.(type) { - case *bzl.ListExpr: - if ps.generic != nil { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: multiple list expressions") - } - ps.generic = part - - case *bzl.CallExpr: - x, ok := part.X.(*bzl.LiteralExpr) - if !ok || x.Token != "select" || len(part.List) != 1 { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: callee other than select or wrong number of args") - } - arg, ok := part.List[0].(*bzl.DictExpr) - if !ok { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: select argument not dict") - } - var dict **bzl.DictExpr - for _, item := range arg.List { - kv := item.(*bzl.KeyValueExpr) // parser guarantees this - k, ok := kv.Key.(*bzl.StringExpr) - if !ok { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict keys are not all strings") - } - if k.Value == "//conditions:default" { - continue - } - key, err := label.Parse(k.Value) - if err != nil { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict key is not label: %q", k.Value) - } - if KnownOSSet[key.Name] { - dict = &ps.os - break - } - if KnownArchSet[key.Name] { - dict = &ps.arch - break - } - osArch := strings.Split(key.Name, "_") - if len(osArch) != 2 || !KnownOSSet[osArch[0]] || !KnownArchSet[osArch[1]] { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict key contains unknown platform: %q", k.Value) - } - dict = &ps.platform - break - } - if dict == nil { - // We could not identify the dict because it's empty or only contains - // //conditions:default. We'll call it the platform dict to avoid - // dropping it. - dict = &ps.platform - } - if *dict != nil { - return platformStringsExprs{}, fmt.Errorf("expression could not be matched: multiple selects that are either os-specific, arch-specific, or platform-specific") - } - *dict = arg - } - } - return ps, nil -} - -// makePlatformStringsExpr constructs a single expression from the -// sub-expressions in ps. -func makePlatformStringsExpr(ps platformStringsExprs) bzl.Expr { - makeSelect := func(dict *bzl.DictExpr) bzl.Expr { - return &bzl.CallExpr{ - X: &bzl.LiteralExpr{Token: "select"}, - List: []bzl.Expr{dict}, - } - } - forceMultiline := func(e bzl.Expr) { - switch e := e.(type) { - case *bzl.ListExpr: - e.ForceMultiLine = true - case *bzl.CallExpr: - e.List[0].(*bzl.DictExpr).ForceMultiLine = true - } - } - - var parts []bzl.Expr - if ps.generic != nil { - parts = append(parts, ps.generic) - } - if ps.os != nil { - parts = append(parts, makeSelect(ps.os)) - } - if ps.arch != nil { - parts = append(parts, makeSelect(ps.arch)) - } - if ps.platform != nil { - parts = append(parts, makeSelect(ps.platform)) - } - - if len(parts) == 0 { - return nil - } - if len(parts) == 1 { - return parts[0] - } - expr := parts[0] - forceMultiline(expr) - for _, part := range parts[1:] { - forceMultiline(part) - expr = &bzl.BinaryExpr{ - Op: "+", - X: expr, - Y: part, - } - } - return expr -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/merge.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/merge.go deleted file mode 100644 index 0bc30c7189..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/merge.go +++ /dev/null @@ -1,489 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "errors" - "fmt" - "log" - "sort" - - bzl "github.com/bazelbuild/buildtools/build" -) - -// MergeRules copies information from src into dst, usually discarding -// information in dst when they have the same attributes. -// -// If dst is marked with a "# keep" comment, either above the rule or as -// a suffix, nothing will be changed. -// -// If src has an attribute that is not in dst, it will be copied into dst. -// -// If src and dst have the same attribute and the attribute is mergeable and the -// attribute in dst is not marked with a "# keep" comment, values in the dst -// attribute not marked with a "# keep" comment will be dropped, and values from -// src will be copied in. -// -// If dst has an attribute not in src, and the attribute is mergeable and not -// marked with a "# keep" comment, values in the attribute not marked with -// a "# keep" comment will be dropped. If the attribute is empty afterward, -// it will be deleted. -func MergeRules(src, dst *Rule, mergeable map[string]bool, filename string) { - if dst.ShouldKeep() { - return - } - - // Process attributes that are in dst but not in src. - for key, dstAttr := range dst.attrs { - if _, ok := src.attrs[key]; ok || !mergeable[key] || ShouldKeep(dstAttr) { - continue - } - dstValue := dstAttr.Y - if mergedValue, err := mergeExprs(nil, dstValue); err != nil { - start, end := dstValue.Span() - log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune) - } else if mergedValue == nil { - dst.DelAttr(key) - } else { - dst.SetAttr(key, mergedValue) - } - } - - // Merge attributes from src into dst. - for key, srcAttr := range src.attrs { - srcValue := srcAttr.Y - if dstAttr, ok := dst.attrs[key]; !ok { - dst.SetAttr(key, srcValue) - } else if mergeable[key] && !ShouldKeep(dstAttr) { - dstValue := dstAttr.Y - if mergedValue, err := mergeExprs(srcValue, dstValue); err != nil { - start, end := dstValue.Span() - log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune) - } else { - dst.SetAttr(key, mergedValue) - } - } - } -} - -// mergeExprs combines information from src and dst and returns a merged -// expression. dst may be modified during this process. The returned expression -// may be different from dst when a structural change is needed. -// -// The following kinds of expressions are recognized. -// -// * nil -// * strings (can only be merged with strings) -// * lists of strings -// * a call to select with a dict argument. The dict keys must be strings, -// and the values must be lists of strings. -// * a list of strings combined with a select call using +. The list must -// be the left operand. -// -// An error is returned if the expressions can't be merged, for example -// because they are not in one of the above formats. 
-func mergeExprs(src, dst bzl.Expr) (bzl.Expr, error) { - if ShouldKeep(dst) { - return nil, nil - } - if src == nil && (dst == nil || isScalar(dst)) { - return nil, nil - } - if isScalar(src) { - return src, nil - } - - srcExprs, err := extractPlatformStringsExprs(src) - if err != nil { - return nil, err - } - dstExprs, err := extractPlatformStringsExprs(dst) - if err != nil { - return nil, err - } - mergedExprs, err := mergePlatformStringsExprs(srcExprs, dstExprs) - if err != nil { - return nil, err - } - return makePlatformStringsExpr(mergedExprs), nil -} - -func mergePlatformStringsExprs(src, dst platformStringsExprs) (platformStringsExprs, error) { - var ps platformStringsExprs - var err error - ps.generic = mergeList(src.generic, dst.generic) - if ps.os, err = mergeDict(src.os, dst.os); err != nil { - return platformStringsExprs{}, err - } - if ps.arch, err = mergeDict(src.arch, dst.arch); err != nil { - return platformStringsExprs{}, err - } - if ps.platform, err = mergeDict(src.platform, dst.platform); err != nil { - return platformStringsExprs{}, err - } - return ps, nil -} - -func mergeList(src, dst *bzl.ListExpr) *bzl.ListExpr { - if dst == nil { - return src - } - if src == nil { - src = &bzl.ListExpr{List: []bzl.Expr{}} - } - - // Build a list of strings from the src list and keep matching strings - // in the dst list. This preserves comments. Also keep anything with - // a "# keep" comment, whether or not it's in the src list. - srcSet := make(map[string]bool) - for _, v := range src.List { - if s := stringValue(v); s != "" { - srcSet[s] = true - } - } - - var merged []bzl.Expr - kept := make(map[string]bool) - keepComment := false - for _, v := range dst.List { - s := stringValue(v) - if keep := ShouldKeep(v); keep || srcSet[s] { - keepComment = keepComment || keep - merged = append(merged, v) - if s != "" { - kept[s] = true - } - } - } - - // Add anything in the src list that wasn't kept. - for _, v := range src.List { - if s := stringValue(v); kept[s] { - continue - } - merged = append(merged, v) - } - - if len(merged) == 0 { - return nil - } - return &bzl.ListExpr{ - List: merged, - ForceMultiLine: src.ForceMultiLine || dst.ForceMultiLine || keepComment, - } -} - -func mergeDict(src, dst *bzl.DictExpr) (*bzl.DictExpr, error) { - if dst == nil { - return src, nil - } - if src == nil { - src = &bzl.DictExpr{List: []bzl.Expr{}} - } - - var entries []*dictEntry - entryMap := make(map[string]*dictEntry) - - for _, kv := range dst.List { - k, v, err := dictEntryKeyValue(kv) - if err != nil { - return nil, err - } - if _, ok := entryMap[k]; ok { - return nil, fmt.Errorf("dst dict contains more than one case named %q", k) - } - e := &dictEntry{key: k, dstValue: v} - entries = append(entries, e) - entryMap[k] = e - } - - for _, kv := range src.List { - k, v, err := dictEntryKeyValue(kv) - if err != nil { - return nil, err - } - e, ok := entryMap[k] - if !ok { - e = &dictEntry{key: k} - entries = append(entries, e) - entryMap[k] = e - } - e.srcValue = v - } - - keys := make([]string, 0, len(entries)) - haveDefault := false - for _, e := range entries { - e.mergedValue = mergeList(e.srcValue, e.dstValue) - if e.key == "//conditions:default" { - // Keep the default case, even if it's empty. 
- haveDefault = true - if e.mergedValue == nil { - e.mergedValue = &bzl.ListExpr{} - } - } else if e.mergedValue != nil { - keys = append(keys, e.key) - } - } - if len(keys) == 0 && (!haveDefault || len(entryMap["//conditions:default"].mergedValue.List) == 0) { - return nil, nil - } - sort.Strings(keys) - // Always put the default case last. - if haveDefault { - keys = append(keys, "//conditions:default") - } - - mergedEntries := make([]bzl.Expr, len(keys)) - for i, k := range keys { - e := entryMap[k] - mergedEntries[i] = &bzl.KeyValueExpr{ - Key: &bzl.StringExpr{Value: e.key}, - Value: e.mergedValue, - } - } - - return &bzl.DictExpr{List: mergedEntries, ForceMultiLine: true}, nil -} - -type dictEntry struct { - key string - dstValue, srcValue, mergedValue *bzl.ListExpr -} - -// SquashRules copies information from src into dst without discarding -// information in dst. SquashRules detects duplicate elements in lists and -// dictionaries, but it doesn't sort elements after squashing. If squashing -// fails because the expression is not understood, an error is returned, -// and neither rule is modified. -func SquashRules(src, dst *Rule, filename string) error { - if dst.ShouldKeep() { - return nil - } - - for key, srcAttr := range src.attrs { - srcValue := srcAttr.Y - if dstAttr, ok := dst.attrs[key]; !ok { - dst.SetAttr(key, srcValue) - } else if !ShouldKeep(dstAttr) { - dstValue := dstAttr.Y - if squashedValue, err := squashExprs(srcValue, dstValue); err != nil { - start, end := dstValue.Span() - return fmt.Errorf("%s:%d.%d-%d.%d: could not squash expression", filename, start.Line, start.LineRune, end.Line, end.LineRune) - } else { - dst.SetAttr(key, squashedValue) - } - } - } - dst.call.Comments.Before = append(dst.call.Comments.Before, src.call.Comments.Before...) - dst.call.Comments.Suffix = append(dst.call.Comments.Suffix, src.call.Comments.Suffix...) - dst.call.Comments.After = append(dst.call.Comments.After, src.call.Comments.After...) - return nil -} - -func squashExprs(src, dst bzl.Expr) (bzl.Expr, error) { - if ShouldKeep(dst) { - return dst, nil - } - if isScalar(dst) { - // may lose src, but they should always be the same. 
- return dst, nil - } - srcExprs, err := extractPlatformStringsExprs(src) - if err != nil { - return nil, err - } - dstExprs, err := extractPlatformStringsExprs(dst) - if err != nil { - return nil, err - } - squashedExprs, err := squashPlatformStringsExprs(srcExprs, dstExprs) - if err != nil { - return nil, err - } - return makePlatformStringsExpr(squashedExprs), nil -} - -func squashPlatformStringsExprs(x, y platformStringsExprs) (platformStringsExprs, error) { - var ps platformStringsExprs - var err error - if ps.generic, err = squashList(x.generic, y.generic); err != nil { - return platformStringsExprs{}, err - } - if ps.os, err = squashDict(x.os, y.os); err != nil { - return platformStringsExprs{}, err - } - if ps.arch, err = squashDict(x.arch, y.arch); err != nil { - return platformStringsExprs{}, err - } - if ps.platform, err = squashDict(x.platform, y.platform); err != nil { - return platformStringsExprs{}, err - } - return ps, nil -} - -func squashList(x, y *bzl.ListExpr) (*bzl.ListExpr, error) { - if x == nil { - return y, nil - } - if y == nil { - return x, nil - } - - ls := makeListSquasher() - for _, e := range x.List { - s, ok := e.(*bzl.StringExpr) - if !ok { - return nil, errors.New("could not squash non-string") - } - ls.add(s) - } - for _, e := range y.List { - s, ok := e.(*bzl.StringExpr) - if !ok { - return nil, errors.New("could not squash non-string") - } - ls.add(s) - } - squashed := ls.list() - squashed.Comments.Before = append(x.Comments.Before, y.Comments.Before...) - squashed.Comments.Suffix = append(x.Comments.Suffix, y.Comments.Suffix...) - squashed.Comments.After = append(x.Comments.After, y.Comments.After...) - return squashed, nil -} - -func squashDict(x, y *bzl.DictExpr) (*bzl.DictExpr, error) { - if x == nil { - return y, nil - } - if y == nil { - return x, nil - } - - cases := make(map[string]*bzl.KeyValueExpr) - addCase := func(e bzl.Expr) error { - kv := e.(*bzl.KeyValueExpr) - key, ok := kv.Key.(*bzl.StringExpr) - if !ok { - return errors.New("could not squash non-string dict key") - } - if _, ok := kv.Value.(*bzl.ListExpr); !ok { - return errors.New("could not squash non-list dict value") - } - if c, ok := cases[key.Value]; ok { - if sq, err := squashList(kv.Value.(*bzl.ListExpr), c.Value.(*bzl.ListExpr)); err != nil { - return err - } else { - c.Value = sq - } - } else { - kvCopy := *kv - cases[key.Value] = &kvCopy - } - return nil - } - - for _, e := range x.List { - if err := addCase(e); err != nil { - return nil, err - } - } - for _, e := range y.List { - if err := addCase(e); err != nil { - return nil, err - } - } - - keys := make([]string, 0, len(cases)) - haveDefault := false - for k := range cases { - if k == "//conditions:default" { - haveDefault = true - continue - } - keys = append(keys, k) - } - sort.Strings(keys) - if haveDefault { - keys = append(keys, "//conditions:default") // must be last - } - - squashed := *x - squashed.Comments.Before = append(x.Comments.Before, y.Comments.Before...) - squashed.Comments.Suffix = append(x.Comments.Suffix, y.Comments.Suffix...) - squashed.Comments.After = append(x.Comments.After, y.Comments.After...) - squashed.List = make([]bzl.Expr, 0, len(cases)) - for _, k := range keys { - squashed.List = append(squashed.List, cases[k]) - } - return &squashed, nil -} - -// listSquasher builds a sorted, deduplicated list of string expressions. If -// a string expression is added multiple times, comments are consolidated. -// The original expressions are not modified. 
-type listSquasher struct { - unique map[string]*bzl.StringExpr - seenComments map[elemComment]bool -} - -type elemComment struct { - elem, com string -} - -func makeListSquasher() listSquasher { - return listSquasher{ - unique: make(map[string]*bzl.StringExpr), - seenComments: make(map[elemComment]bool), - } -} - -func (ls *listSquasher) add(s *bzl.StringExpr) { - sCopy, ok := ls.unique[s.Value] - if !ok { - // Make a copy of s. We may modify it when we consolidate comments from - // duplicate strings. We don't want to modify the original in case this - // function fails (due to a later failed pattern match). - sCopy = new(bzl.StringExpr) - *sCopy = *s - sCopy.Comments.Before = make([]bzl.Comment, 0, len(s.Comments.Before)) - sCopy.Comments.Suffix = make([]bzl.Comment, 0, len(s.Comments.Suffix)) - ls.unique[s.Value] = sCopy - } - for _, c := range s.Comment().Before { - if key := (elemComment{s.Value, c.Token}); !ls.seenComments[key] { - sCopy.Comments.Before = append(sCopy.Comments.Before, c) - ls.seenComments[key] = true - } - } - for _, c := range s.Comment().Suffix { - if key := (elemComment{s.Value, c.Token}); !ls.seenComments[key] { - sCopy.Comments.Suffix = append(sCopy.Comments.Suffix, c) - ls.seenComments[key] = true - } - } -} - -func (ls *listSquasher) list() *bzl.ListExpr { - sortedExprs := make([]bzl.Expr, 0, len(ls.unique)) - for _, e := range ls.unique { - sortedExprs = append(sortedExprs, e) - } - sort.Slice(sortedExprs, func(i, j int) bool { - return sortedExprs[i].(*bzl.StringExpr).Value < sortedExprs[j].(*bzl.StringExpr).Value - }) - return &bzl.ListExpr{List: sortedExprs} -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform.go deleted file mode 100644 index 2a1cbace74..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform.go +++ /dev/null @@ -1,128 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "sort" -) - -// Platform represents a GOOS/GOARCH pair. When Platform is used to describe -// sources, dependencies, or flags, either OS or Arch may be empty. -type Platform struct { - OS, Arch string -} - -// String returns OS, Arch, or "OS_Arch" if both are set. This must match -// the names of config_setting rules in @io_bazel_rules_go//go/platform. -func (p Platform) String() string { - switch { - case p.OS != "" && p.Arch != "": - return p.OS + "_" + p.Arch - case p.OS != "": - return p.OS - case p.Arch != "": - return p.Arch - default: - return "" - } -} - -// KnownPlatforms is the set of target platforms that Go supports. Gazelle -// will generate multi-platform build files using these tags. rules_go and -// Bazel may not actually support all of these. 
-var KnownPlatforms = []Platform{ - {"android", "386"}, - {"android", "amd64"}, - {"android", "arm"}, - {"android", "arm64"}, - {"darwin", "386"}, - {"darwin", "amd64"}, - {"darwin", "arm"}, - {"darwin", "arm64"}, - {"dragonfly", "amd64"}, - {"freebsd", "386"}, - {"freebsd", "amd64"}, - {"freebsd", "arm"}, - {"linux", "386"}, - {"linux", "amd64"}, - {"linux", "arm"}, - {"linux", "arm64"}, - {"linux", "mips"}, - {"linux", "mips64"}, - {"linux", "mips64le"}, - {"linux", "mipsle"}, - {"linux", "ppc64"}, - {"linux", "ppc64le"}, - {"linux", "s390x"}, - {"nacl", "386"}, - {"nacl", "amd64p32"}, - {"nacl", "arm"}, - {"netbsd", "386"}, - {"netbsd", "amd64"}, - {"netbsd", "arm"}, - {"openbsd", "386"}, - {"openbsd", "amd64"}, - {"openbsd", "arm"}, - {"plan9", "386"}, - {"plan9", "amd64"}, - {"plan9", "arm"}, - {"solaris", "amd64"}, - {"windows", "386"}, - {"windows", "amd64"}, -} - -var ( - // KnownOSs is the sorted list of operating systems that Go supports. - KnownOSs []string - - // KnownOSSet is the set of operating systems that Go supports. - KnownOSSet map[string]bool - - // KnownArchs is the sorted list of architectures that Go supports. - KnownArchs []string - - // KnownArchSet is the set of architectures that Go supports. - KnownArchSet map[string]bool - - // KnownOSArchs is a map from OS to the archictures they run on. - KnownOSArchs map[string][]string - - // KnownArchOSs is a map from architectures to that OSs that run on them. - KnownArchOSs map[string][]string -) - -func init() { - KnownOSSet = make(map[string]bool) - KnownArchSet = make(map[string]bool) - KnownOSArchs = make(map[string][]string) - KnownArchOSs = make(map[string][]string) - for _, p := range KnownPlatforms { - KnownOSSet[p.OS] = true - KnownArchSet[p.Arch] = true - KnownOSArchs[p.OS] = append(KnownOSArchs[p.OS], p.Arch) - KnownArchOSs[p.Arch] = append(KnownArchOSs[p.Arch], p.OS) - } - KnownOSs = make([]string, 0, len(KnownOSSet)) - KnownArchs = make([]string, 0, len(KnownArchSet)) - for os := range KnownOSSet { - KnownOSs = append(KnownOSs, os) - } - for arch := range KnownArchSet { - KnownArchs = append(KnownArchs, arch) - } - sort.Strings(KnownOSs) - sort.Strings(KnownArchs) -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform_strings.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform_strings.go deleted file mode 100644 index bb9f52f82e..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/platform_strings.go +++ /dev/null @@ -1,192 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "sort" - "strings" -) - -// PlatformStrings contains a set of strings associated with a buildable -// target in a package. This is used to store source file names, -// import paths, and flags. -// -// Strings are stored in four sets: generic strings, OS-specific strings, -// arch-specific strings, and OS-and-arch-specific strings. 
A string may not -// be duplicated within a list or across sets; however, a string may appear -// in more than one list within a set (e.g., in "linux" and "windows" within -// the OS set). Strings within each list should be sorted, though this may -// not be relied upon. -type PlatformStrings struct { - // Generic is a list of strings not specific to any platform. - Generic []string - - // OS is a map from OS name (anything in KnownOSs) to - // OS-specific strings. - OS map[string][]string - - // Arch is a map from architecture name (anything in KnownArchs) to - // architecture-specific strings. - Arch map[string][]string - - // Platform is a map from platforms to OS and architecture-specific strings. - Platform map[Platform][]string -} - -// HasExt returns whether this set contains a file with the given extension. -func (ps *PlatformStrings) HasExt(ext string) bool { - return ps.firstExtFile(ext) != "" -} - -func (ps *PlatformStrings) IsEmpty() bool { - return len(ps.Generic) == 0 && len(ps.OS) == 0 && len(ps.Arch) == 0 && len(ps.Platform) == 0 -} - -// Flat returns all the strings in the set, sorted and de-duplicated. -func (ps *PlatformStrings) Flat() []string { - unique := make(map[string]struct{}) - for _, s := range ps.Generic { - unique[s] = struct{}{} - } - for _, ss := range ps.OS { - for _, s := range ss { - unique[s] = struct{}{} - } - } - for _, ss := range ps.Arch { - for _, s := range ss { - unique[s] = struct{}{} - } - } - for _, ss := range ps.Platform { - for _, s := range ss { - unique[s] = struct{}{} - } - } - flat := make([]string, 0, len(unique)) - for s := range unique { - flat = append(flat, s) - } - sort.Strings(flat) - return flat -} - -func (ps *PlatformStrings) firstExtFile(ext string) string { - for _, f := range ps.Generic { - if strings.HasSuffix(f, ext) { - return f - } - } - for _, fs := range ps.OS { - for _, f := range fs { - if strings.HasSuffix(f, ext) { - return f - } - } - } - for _, fs := range ps.Arch { - for _, f := range fs { - if strings.HasSuffix(f, ext) { - return f - } - } - } - for _, fs := range ps.Platform { - for _, f := range fs { - if strings.HasSuffix(f, ext) { - return f - } - } - } - return "" -} - -// Map applies a function that processes individual strings to the strings -// in "ps" and returns a new PlatformStrings with the result. Empty strings -// returned by the function are dropped. -func (ps *PlatformStrings) Map(f func(s string) (string, error)) (PlatformStrings, []error) { - var errors []error - mapSlice := func(ss []string) ([]string, error) { - rs := make([]string, 0, len(ss)) - for _, s := range ss { - if r, err := f(s); err != nil { - errors = append(errors, err) - } else if r != "" { - rs = append(rs, r) - } - } - return rs, nil - } - result, _ := ps.MapSlice(mapSlice) - return result, errors -} - -// MapSlice applies a function that processes slices of strings to the strings -// in "ps" and returns a new PlatformStrings with the results. 
-func (ps *PlatformStrings) MapSlice(f func([]string) ([]string, error)) (PlatformStrings, []error) { - var errors []error - - mapSlice := func(ss []string) []string { - rs, err := f(ss) - if err != nil { - errors = append(errors, err) - return nil - } - return rs - } - - mapStringMap := func(m map[string][]string) map[string][]string { - if m == nil { - return nil - } - rm := make(map[string][]string) - for k, ss := range m { - ss = mapSlice(ss) - if len(ss) > 0 { - rm[k] = ss - } - } - if len(rm) == 0 { - return nil - } - return rm - } - - mapPlatformMap := func(m map[Platform][]string) map[Platform][]string { - if m == nil { - return nil - } - rm := make(map[Platform][]string) - for k, ss := range m { - ss = mapSlice(ss) - if len(ss) > 0 { - rm[k] = ss - } - } - if len(rm) == 0 { - return nil - } - return rm - } - - result := PlatformStrings{ - Generic: mapSlice(ps.Generic), - OS: mapStringMap(ps.OS), - Arch: mapStringMap(ps.Arch), - Platform: mapPlatformMap(ps.Platform), - } - return result, errors -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/rule.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/rule.go deleted file mode 100644 index f46eda0f3b..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/rule.go +++ /dev/null @@ -1,692 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package rule provides tools for editing Bazel build files. It is intended to -// be a more powerful replacement for -// github.com/bazelbuild/buildtools/build.Rule, adapted for Gazelle's usage. It -// is language agnostic, but it may be used for language-specific rules by -// providing configuration. -// -// File is the primary interface to this package. A File represents an -// individual build file. It comprises a list of Rules and a list of Loads. -// Rules and Loads may be inserted, modified, or deleted. When all changes -// are done, File.Save() may be called to write changes back to a file. -package rule - -import ( - "io/ioutil" - "os" - "path/filepath" - "sort" - "strings" - - bzl "github.com/bazelbuild/buildtools/build" - bt "github.com/bazelbuild/buildtools/tables" -) - -// File provides editing functionality for a build file. You can create a -// new file with EmptyFile or load an existing file with LoadFile. After -// changes have been made, call Save to write changes back to a file. -type File struct { - // File is the underlying build file syntax tree. Some editing operations - // may modify this, but editing is not complete until Sync() is called. - File *bzl.File - - // Pkg is the Bazel package this build file defines. - Pkg string - - // Path is the file system path to the build file (same as File.Path). - Path string - - // Directives is a list of configuration directives found in top-level - // comments in the file. This should not be modified after the file is read. - Directives []Directive - - // Loads is a list of load statements within the file. 
This should not - // be modified directly; use Load methods instead. - Loads []*Load - - // Rules is a list of rules within the file (or function calls that look like - // rules). This should not be modified directly; use Rule methods instead. - Rules []*Rule -} - -// EmptyFile creates a File wrapped around an empty syntax tree. -func EmptyFile(path, pkg string) *File { - return &File{ - File: &bzl.File{Path: path}, - Path: path, - Pkg: pkg, - } -} - -// LoadFile loads a build file from disk, parses it, and scans for rules and -// load statements. The syntax tree within the returned File will be modified -// by editing methods. -// -// This function returns I/O and parse errors without modification. It's safe -// to use os.IsNotExist and similar predicates. -func LoadFile(path, pkg string) (*File, error) { - data, err := ioutil.ReadFile(path) - if err != nil { - return nil, err - } - return LoadData(path, pkg, data) -} - -// LoadData parses a build file from a byte slice and scans it for rules and -// load statements. The syntax tree within the returned File will be modified -// by editing methods. -func LoadData(path, pkg string, data []byte) (*File, error) { - ast, err := bzl.Parse(path, data) - if err != nil { - return nil, err - } - return ScanAST(pkg, ast), nil -} - -// ScanAST creates a File wrapped around the given syntax tree. This tree -// will be modified by editing methods. -func ScanAST(pkg string, bzlFile *bzl.File) *File { - f := &File{ - File: bzlFile, - Pkg: pkg, - Path: bzlFile.Path, - } - for i, stmt := range f.File.Stmt { - call, ok := stmt.(*bzl.CallExpr) - if !ok { - continue - } - x, ok := call.X.(*bzl.LiteralExpr) - if !ok { - continue - } - if x.Token == "load" { - if l := loadFromExpr(i, call); l != nil { - f.Loads = append(f.Loads, l) - } - } else { - if r := ruleFromExpr(i, call); r != nil { - f.Rules = append(f.Rules, r) - } - } - } - f.Directives = ParseDirectives(bzlFile) - return f -} - -// MatchBuildFileName looks for a file in files that has a name from names. -// If there is at least one matching file, a path will be returned by joining -// dir and the first matching name. If there are no matching files, the -// empty string is returned. -func MatchBuildFileName(dir string, names []string, files []os.FileInfo) string { - for _, name := range names { - for _, fi := range files { - if fi.Name() == name && !fi.IsDir() { - return filepath.Join(dir, name) - } - } - } - return "" -} - -// Sync writes all changes back to the wrapped syntax tree. This should be -// called after editing operations, before reading the syntax tree again. 
-func (f *File) Sync() { - var inserts, deletes, stmts []*stmt - var r, w int - for r, w = 0, 0; r < len(f.Loads); r++ { - s := f.Loads[r] - s.sync() - if s.deleted { - deletes = append(deletes, &s.stmt) - continue - } - if s.inserted { - inserts = append(inserts, &s.stmt) - s.inserted = false - } else { - stmts = append(stmts, &s.stmt) - } - f.Loads[w] = s - w++ - } - f.Loads = f.Loads[:w] - for r, w = 0, 0; r < len(f.Rules); r++ { - s := f.Rules[r] - s.sync() - if s.deleted { - deletes = append(deletes, &s.stmt) - continue - } - if s.inserted { - inserts = append(inserts, &s.stmt) - s.inserted = false - } else { - stmts = append(stmts, &s.stmt) - } - f.Rules[w] = s - w++ - } - f.Rules = f.Rules[:w] - sort.Stable(byIndex(deletes)) - sort.Stable(byIndex(inserts)) - sort.Stable(byIndex(stmts)) - - oldStmt := f.File.Stmt - f.File.Stmt = make([]bzl.Expr, 0, len(oldStmt)-len(deletes)+len(inserts)) - var ii, di, si int - for i, stmt := range oldStmt { - for ii < len(inserts) && inserts[ii].index == i { - inserts[ii].index = len(f.File.Stmt) - f.File.Stmt = append(f.File.Stmt, inserts[ii].call) - ii++ - } - if di < len(deletes) && deletes[di].index == i { - di++ - continue - } - if si < len(stmts) && stmts[si].call == stmt { - stmts[si].index = len(f.File.Stmt) - si++ - } - f.File.Stmt = append(f.File.Stmt, stmt) - } - for ii < len(inserts) { - inserts[ii].index = len(f.File.Stmt) - f.File.Stmt = append(f.File.Stmt, inserts[ii].call) - ii++ - } -} - -// Format formats the build file in a form that can be written to disk. -// This method calls Sync internally. -func (f *File) Format() []byte { - f.Sync() - return bzl.Format(f.File) -} - -// Save writes the build file to disk. This method calls Sync internally. -func (f *File) Save(path string) error { - f.Sync() - data := bzl.Format(f.File) - return ioutil.WriteFile(path, data, 0666) -} - -type stmt struct { - index int - deleted, inserted, updated bool - call *bzl.CallExpr -} - -// Index returns the index for this statement within the build file. For -// inserted rules, this is where the rule will be inserted (rules with the -// same index will be inserted in the order Insert was called). For existing -// rules, this is the index of the original statement. -func (s *stmt) Index() int { return s.index } - -// Delete marks this statement for deletion. It will be removed from the -// syntax tree when File.Sync is called. -func (s *stmt) Delete() { s.deleted = true } - -type byIndex []*stmt - -func (s byIndex) Len() int { - return len(s) -} - -func (s byIndex) Less(i, j int) bool { - return s[i].index < s[j].index -} - -func (s byIndex) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} - -// Load represents a load statement within a build file. -type Load struct { - stmt - name string - symbols map[string]bzl.Expr -} - -// NewLoad creates a new, empty load statement for the given file name. 
-func NewLoad(name string) *Load { - return &Load{ - stmt: stmt{ - call: &bzl.CallExpr{ - X: &bzl.LiteralExpr{Token: "load"}, - List: []bzl.Expr{&bzl.StringExpr{Value: name}}, - ForceCompact: true, - }, - }, - name: name, - symbols: make(map[string]bzl.Expr), - } -} - -func loadFromExpr(index int, call *bzl.CallExpr) *Load { - l := &Load{ - stmt: stmt{index: index, call: call}, - symbols: make(map[string]bzl.Expr), - } - if len(call.List) == 0 { - return nil - } - name, ok := call.List[0].(*bzl.StringExpr) - if !ok { - return nil - } - l.name = name.Value - for _, arg := range call.List[1:] { - switch arg := arg.(type) { - case *bzl.StringExpr: - l.symbols[arg.Value] = arg - case *bzl.BinaryExpr: - x, ok := arg.X.(*bzl.LiteralExpr) - if !ok { - return nil - } - if _, ok := arg.Y.(*bzl.StringExpr); !ok { - return nil - } - l.symbols[x.Token] = arg - default: - return nil - } - } - return l -} - -// Name returns the name of the file this statement loads. -func (l *Load) Name() string { - return l.name -} - -// Symbols returns a list of symbols this statement loads. -func (l *Load) Symbols() []string { - syms := make([]string, 0, len(l.symbols)) - for sym := range l.symbols { - syms = append(syms, sym) - } - sort.Strings(syms) - return syms -} - -// Has returns true if sym is loaded by this statement. -func (l *Load) Has(sym string) bool { - _, ok := l.symbols[sym] - return ok -} - -// Add inserts a new symbol into the load statement. This has no effect if -// the symbol is already loaded. Symbols will be sorted, so the order -// doesn't matter. -func (l *Load) Add(sym string) { - if _, ok := l.symbols[sym]; !ok { - l.symbols[sym] = &bzl.StringExpr{Value: sym} - l.updated = true - } -} - -// Remove deletes a symbol from the load statement. This has no effect if -// the symbol is not loaded. -func (l *Load) Remove(sym string) { - if _, ok := l.symbols[sym]; ok { - delete(l.symbols, sym) - l.updated = true - } -} - -// IsEmpty returns whether this statement loads any symbols. -func (l *Load) IsEmpty() bool { - return len(l.symbols) == 0 -} - -// Insert marks this statement for insertion at the given index. If multiple -// statements are inserted at the same index, they will be inserted in the -// order Insert is called. -func (l *Load) Insert(f *File, index int) { - l.index = index - l.inserted = true - f.Loads = append(f.Loads, l) -} - -func (l *Load) sync() { - if !l.updated { - return - } - l.updated = false - - args := make([]*bzl.StringExpr, 0, len(l.symbols)) - kwargs := make([]*bzl.BinaryExpr, 0, len(l.symbols)) - for _, e := range l.symbols { - if a, ok := e.(*bzl.StringExpr); ok { - args = append(args, a) - } else { - kwargs = append(kwargs, e.(*bzl.BinaryExpr)) - } - } - sort.Slice(args, func(i, j int) bool { - return args[i].Value < args[j].Value - }) - sort.Slice(kwargs, func(i, j int) bool { - return kwargs[i].X.(*bzl.StringExpr).Value < kwargs[j].Y.(*bzl.StringExpr).Value - }) - - list := make([]bzl.Expr, 0, 1+len(l.symbols)) - list = append(list, l.call.List[0]) - for _, a := range args { - list = append(list, a) - } - for _, a := range kwargs { - list = append(list, a) - } - l.call.List = list - l.call.ForceCompact = len(kwargs) == 0 -} - -// Rule represents a rule statement within a build file. -type Rule struct { - stmt - kind string - args []bzl.Expr - attrs map[string]*bzl.BinaryExpr - private map[string]interface{} -} - -// NewRule creates a new, empty rule with the given kind and name. 
-func NewRule(kind, name string) *Rule { - nameAttr := &bzl.BinaryExpr{ - X: &bzl.LiteralExpr{Token: "name"}, - Y: &bzl.StringExpr{Value: name}, - Op: "=", - } - r := &Rule{ - stmt: stmt{ - call: &bzl.CallExpr{ - X: &bzl.LiteralExpr{Token: kind}, - List: []bzl.Expr{nameAttr}, - }, - }, - kind: kind, - attrs: map[string]*bzl.BinaryExpr{"name": nameAttr}, - private: map[string]interface{}{}, - } - return r -} - -func ruleFromExpr(index int, expr bzl.Expr) *Rule { - call, ok := expr.(*bzl.CallExpr) - if !ok { - return nil - } - x, ok := call.X.(*bzl.LiteralExpr) - if !ok { - return nil - } - kind := x.Token - var args []bzl.Expr - attrs := make(map[string]*bzl.BinaryExpr) - for _, arg := range call.List { - attr, ok := arg.(*bzl.BinaryExpr) - if ok && attr.Op == "=" { - key := attr.X.(*bzl.LiteralExpr) // required by parser - attrs[key.Token] = attr - } else { - args = append(args, arg) - } - } - return &Rule{ - stmt: stmt{ - index: index, - call: call, - }, - kind: kind, - args: args, - attrs: attrs, - private: map[string]interface{}{}, - } -} - -// ShouldKeep returns whether the rule is marked with a "# keep" comment. Rules -// that are kept should not be modified. This does not check whether -// subexpressions within the rule should be kept. -func (r *Rule) ShouldKeep() bool { - return ShouldKeep(r.call) -} - -// Kind returns the kind of rule this is (for example, "go_library"). -func (r *Rule) Kind() string { - return r.kind -} - -// SetKind changes the kind of rule this is. -func (r *Rule) SetKind(kind string) { - r.kind = kind - r.updated = true -} - -// Name returns the value of the rule's "name" attribute if it is a string -// or "" if the attribute does not exist or is not a string. -func (r *Rule) Name() string { - return r.AttrString("name") -} - -// SetName sets the value of the rule's "name" attribute. -func (r *Rule) SetName(name string) { - r.SetAttr("name", name) -} - -// AttrKeys returns a sorted list of attribute keys used in this rule. -func (r *Rule) AttrKeys() []string { - keys := make([]string, 0, len(r.attrs)) - for k := range r.attrs { - keys = append(keys, k) - } - sort.SliceStable(keys, func(i, j int) bool { - if cmp := bt.NamePriority[keys[i]] - bt.NamePriority[keys[j]]; cmp != 0 { - return cmp < 0 - } - return keys[i] < keys[j] - }) - return keys -} - -// Attr returns the value of the named attribute. nil is returned when the -// attribute is not set. -func (r *Rule) Attr(key string) bzl.Expr { - attr, ok := r.attrs[key] - if !ok { - return nil - } - return attr.Y -} - -// AttrString returns the value of the named attribute if it is a scalar string. -// "" is returned if the attribute is not set or is not a string. -func (r *Rule) AttrString(key string) string { - attr, ok := r.attrs[key] - if !ok { - return "" - } - str, ok := attr.Y.(*bzl.StringExpr) - if !ok { - return "" - } - return str.Value -} - -// AttrStrings returns the string values of an attribute if it is a list. -// nil is returned if the attribute is not set or is not a list. Non-string -// values within the list won't be returned. -func (r *Rule) AttrStrings(key string) []string { - attr, ok := r.attrs[key] - if !ok { - return nil - } - list, ok := attr.Y.(*bzl.ListExpr) - if !ok { - return nil - } - strs := make([]string, 0, len(list.List)) - for _, e := range list.List { - if str, ok := e.(*bzl.StringExpr); ok { - strs = append(strs, str.Value) - } - } - return strs -} - -// DelAttr removes the named attribute from the rule. 
-func (r *Rule) DelAttr(key string) { - delete(r.attrs, key) - r.updated = true -} - -// SetAttr adds or replaces the named attribute with an expression produced -// by ExprFromValue. -func (r *Rule) SetAttr(key string, value interface{}) { - y := ExprFromValue(value) - if attr, ok := r.attrs[key]; ok { - attr.Y = y - } else { - r.attrs[key] = &bzl.BinaryExpr{ - X: &bzl.LiteralExpr{Token: key}, - Y: y, - Op: "=", - } - } - r.updated = true -} - -// PrivateAttrKeys returns a sorted list of private attribute names. -func (r *Rule) PrivateAttrKeys() []string { - keys := make([]string, 0, len(r.private)) - for k := range r.private { - keys = append(keys, k) - } - sort.Strings(keys) - return keys -} - -// PrivateAttr return the private value associated with a key. -func (r *Rule) PrivateAttr(key string) interface{} { - return r.private[key] -} - -// SetPrivateAttr associates a value with a key. Unlike SetAttr, this value -// is not converted to a build syntax tree and will not be written to a build -// file. -func (r *Rule) SetPrivateAttr(key string, value interface{}) { - r.private[key] = value -} - -// Args returns positional arguments passed to a rule. -func (r *Rule) Args() []bzl.Expr { - return r.args -} - -// Insert marks this statement for insertion at the end of the file. Multiple -// statements will be inserted in the order Insert is called. -func (r *Rule) Insert(f *File) { - // TODO(jayconrod): should rules always be inserted at the end? Should there - // be some sort order? - r.index = len(f.File.Stmt) - r.inserted = true - f.Rules = append(f.Rules, r) -} - -// IsEmpty returns true when the rule contains none of the attributes in attrs -// for its kind. attrs should contain attributes that make the rule buildable -// like srcs or deps and not descriptive attributes like name or visibility. -func (r *Rule) IsEmpty(info KindInfo) bool { - if info.NonEmptyAttrs == nil { - return false - } - for k := range info.NonEmptyAttrs { - if _, ok := r.attrs[k]; ok { - return false - } - } - return true -} - -func (r *Rule) sync() { - if !r.updated { - return - } - r.updated = false - - for _, k := range []string{"srcs", "deps"} { - if attr, ok := r.attrs[k]; ok { - bzl.Walk(attr.Y, sortExprLabels) - } - } - - call := r.call - call.X.(*bzl.LiteralExpr).Token = r.kind - - list := make([]bzl.Expr, 0, len(r.args)+len(r.attrs)) - list = append(list, r.args...) - for _, attr := range r.attrs { - list = append(list, attr) - } - sortedAttrs := list[len(r.args):] - key := func(e bzl.Expr) string { return e.(*bzl.BinaryExpr).X.(*bzl.LiteralExpr).Token } - sort.SliceStable(sortedAttrs, func(i, j int) bool { - ki := key(sortedAttrs[i]) - kj := key(sortedAttrs[j]) - if cmp := bt.NamePriority[ki] - bt.NamePriority[kj]; cmp != 0 { - return cmp < 0 - } - return ki < kj - }) - - r.call.List = list - r.updated = false -} - -// ShouldKeep returns whether e is marked with a "# keep" comment. Kept -// expressions should not be removed or modified. -func ShouldKeep(e bzl.Expr) bool { - for _, c := range append(e.Comment().Before, e.Comment().Suffix...) 
{ - text := strings.TrimSpace(strings.TrimPrefix(c.Token, "#")) - if text == "keep" { - return true - } - } - return false -} - -type byAttrName []KeyValue - -var _ sort.Interface = byAttrName{} - -func (s byAttrName) Len() int { - return len(s) -} - -func (s byAttrName) Less(i, j int) bool { - if cmp := bt.NamePriority[s[i].Key] - bt.NamePriority[s[j].Key]; cmp != 0 { - return cmp < 0 - } - return s[i].Key < s[j].Key -} - -func (s byAttrName) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/sort_labels.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/sort_labels.go deleted file mode 100644 index bd27eb5582..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/sort_labels.go +++ /dev/null @@ -1,114 +0,0 @@ -/* Copyright 2017 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "sort" - "strings" - - bzl "github.com/bazelbuild/buildtools/build" -) - -// sortExprLabels sorts lists of strings using the same order as buildifier. -// Buildifier also sorts string lists, but not those involved with "select" -// expressions. This function is intended to be used with bzl.Walk. -func sortExprLabels(e bzl.Expr, _ []bzl.Expr) { - list, ok := e.(*bzl.ListExpr) - if !ok || len(list.List) == 0 { - return - } - - keys := make([]stringSortKey, len(list.List)) - for i, elem := range list.List { - s, ok := elem.(*bzl.StringExpr) - if !ok { - return // don't sort lists unless all elements are strings - } - keys[i] = makeSortKey(i, s) - } - - before := keys[0].x.Comment().Before - keys[0].x.Comment().Before = nil - sort.Sort(byStringExpr(keys)) - keys[0].x.Comment().Before = append(before, keys[0].x.Comment().Before...) - for i, k := range keys { - list.List[i] = k.x - } -} - -// Code below this point is adapted from -// github.com/bazelbuild/buildtools/build/rewrite.go - -// A stringSortKey records information about a single string literal to be -// sorted. The strings are first grouped into four phases: most strings, -// strings beginning with ":", strings beginning with "//", and strings -// beginning with "@". The next significant part of the comparison is the list -// of elements in the value, where elements are split at `.' and `:'. Finally -// we compare by value and break ties by original index. -type stringSortKey struct { - phase int - split []string - value string - original int - x bzl.Expr -} - -func makeSortKey(index int, x *bzl.StringExpr) stringSortKey { - key := stringSortKey{ - value: x.Value, - original: index, - x: x, - } - - switch { - case strings.HasPrefix(x.Value, ":"): - key.phase = 1 - case strings.HasPrefix(x.Value, "//"): - key.phase = 2 - case strings.HasPrefix(x.Value, "@"): - key.phase = 3 - } - - key.split = strings.Split(strings.Replace(x.Value, ":", ".", -1), ".") - return key -} - -// byStringExpr implements sort.Interface for a list of stringSortKey. 
-type byStringExpr []stringSortKey - -func (x byStringExpr) Len() int { return len(x) } -func (x byStringExpr) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byStringExpr) Less(i, j int) bool { - xi := x[i] - xj := x[j] - - if xi.phase != xj.phase { - return xi.phase < xj.phase - } - for k := 0; k < len(xi.split) && k < len(xj.split); k++ { - if xi.split[k] != xj.split[k] { - return xi.split[k] < xj.split[k] - } - } - if len(xi.split) != len(xj.split) { - return len(xi.split) < len(xj.split) - } - if xi.value != xj.value { - return xi.value < xj.value - } - return xi.original < xj.original -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/types.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/types.go deleted file mode 100644 index 7f5fd20823..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/types.go +++ /dev/null @@ -1,56 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -// LoadInfo describes a file that Gazelle knows about and the symbols -// it defines. -type LoadInfo struct { - Name string - Symbols []string - After []string -} - -// KindInfo stores metadata for a kind or fule, for example, "go_library". -type KindInfo struct { - // MatchAny is true if a rule of this kind may be matched with any rule - // of the same kind, regardless of attributes, if exactly one rule is - // present a build file. - MatchAny bool - - // MatchAttrs is a list of attributes used in matching. For example, - // for go_library, this list contains "importpath". Attributes are matched - // in order. - MatchAttrs []string - - // NonEmptyAttrs is a set of attributes that, if present, disqualify a rule - // from being deleted after merge. - NonEmptyAttrs map[string]bool - - // SubstituteAttrs is a set of attributes that should be substituted - // after matching and before merging. For example, suppose generated rule A - // references B via an "embed" attribute, and B matches against rule C. - // The label for B in A's "embed" must be substituted with a label for C. - // "embed" would need to be in this set. - SubstituteAttrs map[string]bool - - // MergeableAttrs is a set of attributes that should be merged before - // dependency resolution. See rule.Merge. - MergeableAttrs map[string]bool - - // ResolveAttrs is a set of attributes that should be merged after - // dependency resolution. See rule.Merge. - ResolveAttrs map[string]bool -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/value.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/value.go deleted file mode 100644 index b60838f906..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/rule/value.go +++ /dev/null @@ -1,184 +0,0 @@ -/* Copyright 2016 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package rule - -import ( - "fmt" - "log" - "reflect" - "sort" - - bzl "github.com/bazelbuild/buildtools/build" -) - -// KeyValue represents a key-value pair. This gets converted into a -// rule attribute, i.e., a Skylark keyword argument. -type KeyValue struct { - Key string - Value interface{} -} - -// GlobValue represents a Bazel glob expression. -type GlobValue struct { - Patterns []string - Excludes []string -} - -// ExprFromValue converts a value into an expression that can be written into -// a Bazel build file. The following types of values can be converted: -// -// * bools, integers, floats, strings. -// * slices, arrays (converted to lists). -// * maps (converted to select expressions; keys must be rules in -// @io_bazel_rules_go//go/platform). -// * GlobValue (converted to glob expressions). -// * PlatformStrings (converted to a concatenation of a list and selects). -// -// Converting unsupported types will cause a panic. -func ExprFromValue(val interface{}) bzl.Expr { - if e, ok := val.(bzl.Expr); ok { - return e - } - - rv := reflect.ValueOf(val) - switch rv.Kind() { - case reflect.Bool: - tok := "False" - if rv.Bool() { - tok = "True" - } - return &bzl.LiteralExpr{Token: tok} - - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return &bzl.LiteralExpr{Token: fmt.Sprintf("%d", val)} - - case reflect.Float32, reflect.Float64: - return &bzl.LiteralExpr{Token: fmt.Sprintf("%f", val)} - - case reflect.String: - return &bzl.StringExpr{Value: val.(string)} - - case reflect.Slice, reflect.Array: - var list []bzl.Expr - for i := 0; i < rv.Len(); i++ { - elem := ExprFromValue(rv.Index(i).Interface()) - list = append(list, elem) - } - return &bzl.ListExpr{List: list} - - case reflect.Map: - rkeys := rv.MapKeys() - sort.Sort(byString(rkeys)) - args := make([]bzl.Expr, len(rkeys)) - for i, rk := range rkeys { - label := fmt.Sprintf("@io_bazel_rules_go//go/platform:%s", mapKeyString(rk)) - k := &bzl.StringExpr{Value: label} - v := ExprFromValue(rv.MapIndex(rk).Interface()) - if l, ok := v.(*bzl.ListExpr); ok { - l.ForceMultiLine = true - } - args[i] = &bzl.KeyValueExpr{Key: k, Value: v} - } - args = append(args, &bzl.KeyValueExpr{ - Key: &bzl.StringExpr{Value: "//conditions:default"}, - Value: &bzl.ListExpr{}, - }) - sel := &bzl.CallExpr{ - X: &bzl.LiteralExpr{Token: "select"}, - List: []bzl.Expr{&bzl.DictExpr{List: args, ForceMultiLine: true}}, - } - return sel - - case reflect.Struct: - switch val := val.(type) { - case GlobValue: - patternsValue := ExprFromValue(val.Patterns) - globArgs := []bzl.Expr{patternsValue} - if len(val.Excludes) > 0 { - excludesValue := ExprFromValue(val.Excludes) - globArgs = append(globArgs, &bzl.KeyValueExpr{ - Key: &bzl.StringExpr{Value: "excludes"}, - Value: excludesValue, - }) - } - return &bzl.CallExpr{ - X: &bzl.LiteralExpr{Token: "glob"}, - List: globArgs, - } - - case PlatformStrings: - var pieces []bzl.Expr - if len(val.Generic) > 0 { - pieces = append(pieces, ExprFromValue(val.Generic)) - } - if len(val.OS) > 0 { - pieces = append(pieces, 
ExprFromValue(val.OS)) - } - if len(val.Arch) > 0 { - pieces = append(pieces, ExprFromValue(val.Arch)) - } - if len(val.Platform) > 0 { - pieces = append(pieces, ExprFromValue(val.Platform)) - } - if len(pieces) == 0 { - return &bzl.ListExpr{} - } else if len(pieces) == 1 { - return pieces[0] - } else { - e := pieces[0] - if list, ok := e.(*bzl.ListExpr); ok { - list.ForceMultiLine = true - } - for _, piece := range pieces[1:] { - e = &bzl.BinaryExpr{X: e, Y: piece, Op: "+"} - } - return e - } - } - } - - log.Panicf("type not supported: %T", val) - return nil -} - -func mapKeyString(k reflect.Value) string { - switch s := k.Interface().(type) { - case string: - return s - case Platform: - return s.String() - default: - log.Panicf("unexpected map key: %v", k) - return "" - } -} - -type byString []reflect.Value - -var _ sort.Interface = byString{} - -func (s byString) Len() int { - return len(s) -} - -func (s byString) Less(i, j int) bool { - return mapKeyString(s[i]) < mapKeyString(s[j]) -} - -func (s byString) Swap(i, j int) { - s[i], s[j] = s[j], s[i] -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/BUILD.bazel deleted file mode 100644 index 9ea6b76b8d..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/BUILD.bazel +++ /dev/null @@ -1,18 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "config.go", - "walk.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk", - importpath = "github.com/bazelbuild/bazel-gazelle/walk", - visibility = ["//visibility:public"], - deps = [ - "//vendor/github.com/bazelbuild/bazel-gazelle/config:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/flag:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/pathtools:go_default_library", - "//vendor/github.com/bazelbuild/bazel-gazelle/rule:go_default_library", - ], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/config.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/config.go deleted file mode 100644 index d363557a9f..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/config.go +++ /dev/null @@ -1,83 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/ - -package walk - -import ( - "flag" - "path" - - "github.com/bazelbuild/bazel-gazelle/config" - gzflag "github.com/bazelbuild/bazel-gazelle/flag" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -type walkConfig struct { - excludes []string - ignore bool - follow []string -} - -const walkName = "_walk" - -func getWalkConfig(c *config.Config) *walkConfig { - return c.Exts[walkName].(*walkConfig) -} - -func (wc *walkConfig) isExcluded(rel, base string) bool { - f := path.Join(rel, base) - for _, x := range wc.excludes { - if f == x { - return true - } - } - return false -} - -type Configurer struct{} - -func (_ *Configurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) { - wc := &walkConfig{} - c.Exts[walkName] = wc - fs.Var(&gzflag.MultiFlag{Values: &wc.excludes}, "exclude", "Path to file or directory that should be ignored (may be repeated)") -} - -func (_ *Configurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error { return nil } - -func (_ *Configurer) KnownDirectives() []string { - return []string{"exclude", "follow", "ignore"} -} - -func (_ *Configurer) Configure(c *config.Config, rel string, f *rule.File) { - wc := getWalkConfig(c) - wcCopy := &walkConfig{} - *wcCopy = *wc - wcCopy.ignore = false - - if f != nil { - for _, d := range f.Directives { - switch d.Key { - case "exclude": - wcCopy.excludes = append(wcCopy.excludes, path.Join(rel, d.Value)) - case "follow": - wcCopy.follow = append(wcCopy.follow, path.Join(rel, d.Value)) - case "ignore": - wcCopy.ignore = true - } - } - } - - c.Exts[walkName] = wcCopy -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/walk.go b/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/walk.go deleted file mode 100644 index 23e5e7f635..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/bazel-gazelle/walk/walk.go +++ /dev/null @@ -1,328 +0,0 @@ -/* Copyright 2018 The Bazel Authors. All rights reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// Package walk provides customizable functionality for visiting each -// subdirectory in a directory tree. -package walk - -import ( - "io/ioutil" - "log" - "os" - "path" - "path/filepath" - "strings" - - "github.com/bazelbuild/bazel-gazelle/config" - "github.com/bazelbuild/bazel-gazelle/pathtools" - "github.com/bazelbuild/bazel-gazelle/rule" -) - -// Mode determines which directories Walk visits and which directories -// should be updated. -type Mode int - -const ( - // In VisitAllUpdateSubdirsMode, Walk visits every directory in the - // repository. The directories given to Walk and their subdirectories are - // updated. - VisitAllUpdateSubdirsMode Mode = iota - - // In VisitAllUpdateDirsMode, Walk visits every directory in the repository. - // Only the directories given to Walk are updated (not their subdirectories). - VisitAllUpdateDirsMode - - // In UpdateDirsMode, Walk only visits and updates directories given to Walk. 
- // Build files in parent directories are read in order to produce a complete - // configuration, but the callback is not called for parent directories. - UpdateDirsMode -) - -// WalkFunc is a callback called by Walk in each visited directory. -// -// dir is the absolute file system path to the directory being visited. -// -// rel is the relative slash-separated path to the directory from the -// repository root. Will be "" for the repository root directory itself. -// -// c is the configuration for the current directory. This may have been -// modified by directives in the directory's build file. -// -// update is true when the build file may be updated. -// -// f is the existing build file in the directory. Will be nil if there -// was no file. -// -// subdirs is a list of base names of subdirectories within dir, not -// including excluded files. -// -// regularFiles is a list of base names of regular files within dir, not -// including excluded files. -// -// genFiles is a list of names of generated files, found by reading -// "out" and "outs" attributes of rules in f. -type WalkFunc func(dir, rel string, c *config.Config, update bool, f *rule.File, subdirs, regularFiles, genFiles []string) - -// Walk traverses the directory tree rooted at c.RepoRoot. Walk visits -// subdirectories in depth-first post-order. -// -// When Walk visits a directory, it lists the files and subdirectories within -// that directory. If a build file is present, Walk reads the build file and -// applies any directives to the configuration (a copy of the parent directory's -// configuration is made, and the copy is modified). After visiting -// subdirectories, the callback wf may be called, depending on the mode. -// -// c is the root configuration to start with. This includes changes made by -// command line flags, but not by the root build file. This configuration -// should not be modified. -// -// cexts is a list of configuration extensions. When visiting a directory, -// before visiting subdirectories, Walk makes a copy of the parent configuration -// and Configure for each extension on the copy. If Walk sees a directive -// that is not listed in KnownDirectives of any extension, an error will -// be logged. -// -// dirs is a list of absolute, canonical file system paths of directories -// to visit. -// -// mode determines whether subdirectories of dirs should be visited recursively, -// when the wf callback should be called, and when the "update" argument -// to the wf callback should be set. -// -// wf is a function that may be called in each directory. -func Walk(c *config.Config, cexts []config.Configurer, dirs []string, mode Mode, wf WalkFunc) { - knownDirectives := make(map[string]bool) - for _, cext := range cexts { - for _, d := range cext.KnownDirectives() { - knownDirectives[d] = true - } - } - - symlinks := symlinkResolver{visited: []string{c.RepoRoot}} - - updateRels := buildUpdateRelMap(c.RepoRoot, dirs) - - var visit func(*config.Config, string, string, bool) - visit = func(c *config.Config, dir, rel string, updateParent bool) { - haveError := false - - // TODO: OPT: ReadDir stats all the files, which is slow. We just care about - // names and modes, so we should use something like - // golang.org/x/tools/internal/fastwalk to speed this up. 
- files, err := ioutil.ReadDir(dir) - if err != nil { - log.Print(err) - return - } - - f, err := loadBuildFile(c, rel, dir, files) - if err != nil { - log.Print(err) - haveError = true - } - - c = configure(cexts, knownDirectives, c, rel, f) - wc := getWalkConfig(c) - - var subdirs, regularFiles []string - for _, fi := range files { - base := fi.Name() - switch { - case base == "" || base[0] == '.' || wc.isExcluded(rel, base): - continue - - case fi.IsDir() || fi.Mode()&os.ModeSymlink != 0 && symlinks.follow(c, dir, rel, base): - subdirs = append(subdirs, base) - - default: - regularFiles = append(regularFiles, base) - } - } - - shouldUpdate := shouldUpdate(rel, mode, updateParent, updateRels) - for _, sub := range subdirs { - if subRel := path.Join(rel, sub); shouldVisit(subRel, mode, updateRels) { - visit(c, filepath.Join(dir, sub), subRel, shouldUpdate) - } - } - - update := !haveError && !wc.ignore && shouldUpdate - if shouldCall(rel, mode, updateRels) { - genFiles := findGenFiles(wc, f) - wf(dir, rel, c, update, f, subdirs, regularFiles, genFiles) - } - } - visit(c, c.RepoRoot, "", false) -} - -// buildUpdateRelMap builds a table of prefixes, used to determine which -// directories to update and visit. -// -// root and dirs must be absolute, canonical file paths. Each entry in dirs -// must be a subdirectory of root. The caller is responsible for checking this. -// -// buildUpdateRelMap returns a map from slash-separated paths relative to the -// root directory ("" for the root itself) to a boolean indicating whether -// the directory should be updated. -func buildUpdateRelMap(root string, dirs []string) map[string]bool { - relMap := make(map[string]bool) - for _, dir := range dirs { - rel, _ := filepath.Rel(root, dir) - rel = filepath.ToSlash(rel) - if rel == "." { - rel = "" - } - - i := 0 - for { - next := strings.IndexByte(rel[i:], '/') + i - if next-i < 0 { - relMap[rel] = true - break - } - prefix := rel[:next] - relMap[prefix] = relMap[prefix] // set to false if not present - i = next + 1 - } - } - return relMap -} - -// shouldCall returns true if Walk should call the callback in the -// directory rel. -func shouldCall(rel string, mode Mode, updateRels map[string]bool) bool { - return mode != UpdateDirsMode || updateRels[rel] -} - -// shouldUpdate returns true if Walk should pass true to the callback's update -// parameter in the directory rel. This indicates the build file should be -// updated. -func shouldUpdate(rel string, mode Mode, updateParent bool, updateRels map[string]bool) bool { - return mode == VisitAllUpdateSubdirsMode && updateParent || updateRels[rel] -} - -// shouldVisit returns true if Walk should visit the subdirectory rel. 
-func shouldVisit(rel string, mode Mode, updateRels map[string]bool) bool { - if mode != UpdateDirsMode { - return true - } - _, ok := updateRels[rel] - return ok -} - -func loadBuildFile(c *config.Config, pkg, dir string, files []os.FileInfo) (*rule.File, error) { - var err error - readDir := dir - readFiles := files - if c.ReadBuildFilesDir != "" { - readDir = filepath.Join(c.ReadBuildFilesDir, filepath.FromSlash(pkg)) - readFiles, err = ioutil.ReadDir(readDir) - if err != nil { - return nil, err - } - } - path := rule.MatchBuildFileName(readDir, c.ValidBuildFileNames, readFiles) - if path == "" { - return nil, nil - } - return rule.LoadFile(path, pkg) -} - -func configure(cexts []config.Configurer, knownDirectives map[string]bool, c *config.Config, rel string, f *rule.File) *config.Config { - if rel != "" { - c = c.Clone() - } - if f != nil { - for _, d := range f.Directives { - if !knownDirectives[d.Key] { - log.Printf("%s: unknown directive: gazelle:%s", f.Path, d.Key) - } - } - } - for _, cext := range cexts { - cext.Configure(c, rel, f) - } - return c -} - -func findGenFiles(wc *walkConfig, f *rule.File) []string { - if f == nil { - return nil - } - var strs []string - for _, r := range f.Rules { - for _, key := range []string{"out", "outs"} { - if s := r.AttrString(key); s != "" { - strs = append(strs, s) - } else if ss := r.AttrStrings(key); len(ss) > 0 { - strs = append(strs, ss...) - } - } - } - - var genFiles []string - for _, s := range strs { - if !wc.isExcluded(f.Pkg, s) { - genFiles = append(genFiles, s) - } - } - return genFiles -} - -type symlinkResolver struct { - visited []string -} - -// Decide if symlink dir/base should be followed. -func (r *symlinkResolver) follow(c *config.Config, dir, rel, base string) bool { - if dir == c.RepoRoot && strings.HasPrefix(base, "bazel-") { - // Links such as bazel-, bazel-out, bazel-genfiles are created by - // Bazel to point to internal build directories. - return false - } - - // See if the user has explicitly directed us to follow the link. - wc := getWalkConfig(c) - linkRel := path.Join(rel, base) - for _, follow := range wc.follow { - if linkRel == follow { - return true - } - } - - // See if the symlink points to a tree that has been already visited. - fullpath := filepath.Join(dir, base) - dest, err := filepath.EvalSymlinks(fullpath) - if err != nil { - return false - } - if !filepath.IsAbs(dest) { - dest, err = filepath.Abs(filepath.Join(dir, dest)) - if err != nil { - return false - } - } - for _, p := range r.visited { - if pathtools.HasPrefix(dest, p) || pathtools.HasPrefix(p, dest) { - return false - } - } - r.visited = append(r.visited, dest) - stat, err := os.Stat(fullpath) - if err != nil { - return false - } - return stat.IsDir() -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS deleted file mode 100644 index 2d919a47f9..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/CONTRIBUTORS +++ /dev/null @@ -1,15 +0,0 @@ -# People who have agreed to one of the CLAs and can contribute patches. -# The AUTHORS file lists the copyright holders; this file -# lists people. For example, Google employees are listed here -# but not in AUTHORS, because Google holds the copyright. 
-# -# https://developers.google.com/open-source/cla/individual -# https://developers.google.com/open-source/cla/corporate -# -# Names should be added to this file as: -# Name -Paul Bethe -Russ Cox -Laurent Le Brun -Justine Alexandra Roberts Tunney -Nilton Volpato \ No newline at end of file diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/LICENSE b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/LICENSE deleted file mode 100644 index 0adcb5d6f5..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel deleted file mode 100644 index 86ab608dba..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/BUILD.bazel +++ /dev/null @@ -1,19 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "lex.go", - "parse.y.go", - "print.go", - "quote.go", - "rewrite.go", - "rule.go", - "syntax.go", - "walk.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/buildtools/build", - importpath = "github.com/bazelbuild/buildtools/build", - visibility = ["//visibility:public"], - deps = ["//vendor/github.com/bazelbuild/buildtools/tables:go_default_library"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/lex.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/lex.go deleted file mode 100644 index adc9e1dfc9..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/lex.go +++ /dev/null @@ -1,870 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -// Lexical scanning for BUILD file parser. - -package build - -import ( - "bytes" - "fmt" - "strings" - "unicode/utf8" - - "github.com/bazelbuild/buildtools/tables" -) - -// Parse parses the input data and returns the corresponding parse tree. -// -// The filename is used only for generating error messages. -func Parse(filename string, data []byte) (*File, error) { - in := newInput(filename, data) - return in.parse() -} - -// An input represents a single input file being parsed. -type input struct { - // Lexing state. 
- filename string // name of input file, for errors - complete []byte // entire input - remaining []byte // remaining input - token []byte // token being scanned - lastToken string // most recently returned token, for error messages - pos Position // current input position - lineComments []Comment // accumulated line comments - suffixComments []Comment // accumulated suffix comments - endStmt int // position of the end of the current statement - depth int // nesting of [ ] { } ( ) - cleanLine bool // true if the current line only contains whitespace before the current position - indent int // current line indentation in spaces - indents []int // stack of indentation levels in spaces - - // Parser state. - file *File // returned top-level syntax tree - parseError error // error encountered during parsing - - // Comment assignment state. - pre []Expr // all expressions, in preorder traversal - post []Expr // all expressions, in postorder traversal -} - -func newInput(filename string, data []byte) *input { - // The syntax requires that each simple statement ends with '\n', however it's optional at EOF. - // If `data` doesn't end with '\n' we add it here to keep parser simple. - // It shouldn't affect neither the parsed tree nor its formatting. - data = append(data, '\n') - - return &input{ - filename: filename, - complete: data, - remaining: data, - pos: Position{Line: 1, LineRune: 1, Byte: 0}, - cleanLine: true, - indents: []int{0}, - endStmt: -1, // -1 denotes it's not inside a statement - } -} - -func (in *input) currentIndent() int { - return in.indents[len(in.indents)-1] -} - -// parse parses the input file. -func (in *input) parse() (f *File, err error) { - // The parser panics for both routine errors like syntax errors - // and for programmer bugs like array index errors. - // Turn both into error returns. Catching bug panics is - // especially important when processing many files. - defer func() { - if e := recover(); e != nil { - if e == in.parseError { - err = in.parseError - } else { - err = fmt.Errorf("%s:%d:%d: internal error: %v", in.filename, in.pos.Line, in.pos.LineRune, e) - } - } - }() - - // Invoke the parser generated from parse.y. - yyParse(in) - if in.parseError != nil { - return nil, in.parseError - } - in.file.Path = in.filename - - // Assign comments to nearby syntax. - in.assignComments() - - return in.file, nil -} - -// Error is called to report an error. -// When called by the generated code s is always "syntax error". -// Error does not return: it panics. -func (in *input) Error(s string) { - if s == "syntax error" && in.lastToken != "" { - s += " near " + in.lastToken - } - in.parseError = fmt.Errorf("%s:%d:%d: %v", in.filename, in.pos.Line, in.pos.LineRune, s) - panic(in.parseError) -} - -// eof reports whether the input has reached end of file. -func (in *input) eof() bool { - return len(in.remaining) == 0 -} - -// peekRune returns the next rune in the input without consuming it. -func (in *input) peekRune() int { - if len(in.remaining) == 0 { - return 0 - } - r, _ := utf8.DecodeRune(in.remaining) - return int(r) -} - -// readRune consumes and returns the next rune in the input. 
-func (in *input) readRune() int { - if len(in.remaining) == 0 { - in.Error("internal lexer error: readRune at EOF") - } - r, size := utf8.DecodeRune(in.remaining) - in.remaining = in.remaining[size:] - if r == '\n' { - in.pos.Line++ - in.pos.LineRune = 1 - } else { - in.pos.LineRune++ - } - in.pos.Byte += size - return int(r) -} - -// startToken marks the beginning of the next input token. -// It must be followed by a call to endToken, once the token has -// been consumed using readRune. -func (in *input) startToken(val *yySymType) { - in.token = in.remaining - val.tok = "" - val.pos = in.pos -} - -// yySymType (used in the next few functions) is defined by the -// generated parser. It is a struct containing all the fields listed -// in parse.y's %union [sic] section. - -// endToken marks the end of an input token. -// It records the actual token string in val.tok if the caller -// has not done that already. -func (in *input) endToken(val *yySymType) { - if val.tok == "" { - tok := string(in.token[:len(in.token)-len(in.remaining)]) - val.tok = tok - in.lastToken = val.tok - } -} - -// Lex is called from the generated parser to obtain the next input token. -// It returns the token value (either a rune like '+' or a symbolic token _FOR) -// and sets val to the data associated with the token. -// -// For all our input tokens, the associated data is -// val.Pos (the position where the token begins) -// and val.Token (the input string corresponding to the token). -func (in *input) Lex(val *yySymType) int { - // Skip past spaces, stopping at non-space or EOF. - countNL := 0 // number of newlines we've skipped past - for !in.eof() { - // If a single statement is split into multiple lines, we don't need - // to track indentations and unindentations within these lines. For example: - // - // def f( - // # This indentation should be ignored - // x): - // # This unindentation should be ignored - // # Actual indentation is from 0 to 2 spaces here - // return x - // - // To handle this case, when we reach the beginning of a statement we scan forward to see where - // it should end and record the number of input bytes remaining at that endpoint. - // - // If --format_bzl is set to false, top level blocks (e.g. an entire function definition) - // is considered as a single statement. - if in.endStmt != -1 && len(in.remaining) == in.endStmt { - in.endStmt = -1 - } - - // Skip over spaces. Count newlines so we can give the parser - // information about where top-level blank lines are, - // for top-level comment assignment. - c := in.peekRune() - if c == ' ' || c == '\t' || c == '\r' || c == '\n' { - if c == '\n' { - in.indent = 0 - in.cleanLine = true - if in.endStmt == -1 { - // Not in a statememt. Tell parser about top-level blank line. - in.startToken(val) - in.readRune() - in.endToken(val) - return '\n' - } - countNL++ - } else if c == ' ' && in.cleanLine { - in.indent++ - } - in.readRune() - continue - } - - // Comment runs to end of line. - if c == '#' { - // If a line contains just a comment its indentation level doesn't matter. - // Reset it to zero. - in.indent = 0 - in.cleanLine = true - - // Is this comment the only thing on its line? - // Find the last \n before this # and see if it's all - // spaces from there to here. - // If it's a suffix comment but the last non-space symbol before - // it is one of (, [, or {, treat it as a line comment that should be - // put inside the corresponding block. 
- i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n")) - prefix := bytes.TrimSpace(in.complete[i+1 : in.pos.Byte]) - isSuffix := true - if len(prefix) == 0 || - prefix[len(prefix)-1] == '[' || - prefix[len(prefix)-1] == '(' || - prefix[len(prefix)-1] == '{' { - isSuffix = false - } - - // Consume comment without the \n it ends with. - in.startToken(val) - for len(in.remaining) > 0 && in.peekRune() != '\n' { - in.readRune() - } - - in.endToken(val) - - val.tok = strings.TrimRight(val.tok, "\n") - in.lastToken = "comment" - - // If we are at top level (not in a rule), hand the comment to - // the parser as a _COMMENT token. The grammar is written - // to handle top-level comments itself. - if in.endStmt == -1 { - // Not in a statement. Tell parser about top-level comment. - return _COMMENT - } - - // Otherwise, save comment for later attachment to syntax tree. - if countNL > 1 { - in.lineComments = append(in.lineComments, Comment{val.pos, ""}) - } - if isSuffix { - in.suffixComments = append(in.suffixComments, Comment{val.pos, val.tok}) - } else { - in.lineComments = append(in.lineComments, Comment{val.pos, val.tok}) - } - countNL = 0 - continue - } - - if c == '\\' && len(in.remaining) >= 2 && in.remaining[1] == '\n' { - // We can ignore a trailing \ at end of line together with the \n. - in.readRune() - in.readRune() - continue - } - - // Found non-space non-comment. - break - } - - // Check for changes in indentation - // Skip if --format_bzl is set to false, if we're inside a statement, or if there were non-space - // characters before in the current line. - if tables.FormatBzlFiles && in.endStmt == -1 && in.cleanLine { - if in.indent > in.currentIndent() { - // A new indentation block starts - in.indents = append(in.indents, in.indent) - in.lastToken = "indent" - in.cleanLine = false - return _INDENT - } else if in.indent < in.currentIndent() { - // An indentation block ends - in.indents = in.indents[:len(in.indents)-1] - - // It's a syntax error if the current line indentation level in now greater than - // currentIndent(), should be either equal (a parent block continues) or still less - // (need to unindent more). - if in.indent > in.currentIndent() { - in.pos = val.pos - in.Error("unexpected indentation") - } - in.lastToken = "unindent" - return _UNINDENT - } - } - - in.cleanLine = false - - // If the file ends with an indented block, return the corresponding amounts of unindents. - if in.eof() && in.currentIndent() > 0 { - in.indents = in.indents[:len(in.indents)-1] - in.lastToken = "unindent" - return _UNINDENT - } - - // Found the beginning of the next token. - in.startToken(val) - defer in.endToken(val) - - // End of file. - if in.eof() { - in.lastToken = "EOF" - return _EOF - } - - // If endStmt is 0, we need to recompute where the end of the next statement is. - if in.endStmt == -1 { - in.endStmt = len(in.skipStmt(in.remaining)) - } - - // Punctuation tokens. 
- switch c := in.peekRune(); c { - case '[', '(', '{': - in.depth++ - in.readRune() - return c - - case ']', ')', '}': - in.depth-- - in.readRune() - return c - - case '.', ':', ';', ',': // single-char tokens - in.readRune() - return c - - case '<', '>', '=', '!', '+', '-', '*', '/', '%': // possibly followed by = - in.readRune() - if c == '/' && in.peekRune() == '/' { - // integer division - in.readRune() - } - - if in.peekRune() == '=' { - in.readRune() - switch c { - case '<': - return _LE - case '>': - return _GE - case '=': - return _EQ - case '!': - return _NE - default: - return _AUGM - } - } - return c - - case 'r': // possible beginning of raw quoted string - if len(in.remaining) < 2 || in.remaining[1] != '"' && in.remaining[1] != '\'' { - break - } - in.readRune() - c = in.peekRune() - fallthrough - - case '"', '\'': // quoted string - quote := c - if len(in.remaining) >= 3 && in.remaining[0] == byte(quote) && in.remaining[1] == byte(quote) && in.remaining[2] == byte(quote) { - // Triple-quoted string. - in.readRune() - in.readRune() - in.readRune() - var c1, c2, c3 int - for { - if in.eof() { - in.pos = val.pos - in.Error("unexpected EOF in string") - } - c1, c2, c3 = c2, c3, in.readRune() - if c1 == quote && c2 == quote && c3 == quote { - break - } - if c3 == '\\' { - if in.eof() { - in.pos = val.pos - in.Error("unexpected EOF in string") - } - in.readRune() - } - } - } else { - in.readRune() - for { - if in.eof() { - in.pos = val.pos - in.Error("unexpected EOF in string") - } - if in.peekRune() == '\n' { - in.Error("unexpected newline in string") - } - c := in.readRune() - if c == quote { - break - } - if c == '\\' { - if in.eof() { - in.pos = val.pos - in.Error("unexpected EOF in string") - } - in.readRune() - } - } - } - in.endToken(val) - s, triple, err := unquote(val.tok) - if err != nil { - in.Error(fmt.Sprint(err)) - } - val.str = s - val.triple = triple - return _STRING - } - - // Checked all punctuation. Must be identifier token. - if c := in.peekRune(); !isIdent(c) { - in.Error(fmt.Sprintf("unexpected input character %#q", c)) - } - - if !tables.FormatBzlFiles { - // Look for raw Python block (class, def, if, etc at beginning of line) and pass through. - if in.depth == 0 && in.pos.LineRune == 1 && hasPythonPrefix(in.remaining) { - // Find end of Python block and advance input beyond it. - // Have to loop calling readRune in order to maintain line number info. - rest := in.skipStmt(in.remaining) - for len(in.remaining) > len(rest) { - in.readRune() - } - return _PYTHON - } - } - - // Scan over alphanumeric identifier. - for { - c := in.peekRune() - if !isIdent(c) { - break - } - in.readRune() - } - - // Call endToken to set val.tok to identifier we just scanned, - // so we can look to see if val.tok is a keyword. - in.endToken(val) - if k := keywordToken[val.tok]; k != 0 { - return k - } - - return _IDENT -} - -// isIdent reports whether c is an identifier rune. -// We treat all non-ASCII runes as identifier runes. -func isIdent(c int) bool { - return '0' <= c && c <= '9' || - 'A' <= c && c <= 'Z' || - 'a' <= c && c <= 'z' || - c == '_' || - c >= 0x80 -} - -// keywordToken records the special tokens for -// strings that should not be treated as ordinary identifiers. -var keywordToken = map[string]int{ - "and": _AND, - "for": _FOR, - "if": _IF, - "else": _ELSE, - "elif": _ELIF, - "in": _IN, - "is": _IS, - "lambda": _LAMBDA, - "load": _LOAD, - "not": _NOT, - "or": _OR, - "def": _DEF, - "return": _RETURN, -} - -// Python scanning. 
-// About 1% of BUILD files embed arbitrary Python into the file. -// We do not attempt to parse it. Instead, we lex just enough to scan -// beyond it, treating the Python block as an unintepreted blob. - -// hasPythonPrefix reports whether p begins with a keyword that would -// introduce an uninterpreted Python block. -func hasPythonPrefix(p []byte) bool { - if tables.FormatBzlFiles { - return false - } - - for _, pre := range prefixes { - if hasPrefixSpace(p, pre) { - return true - } - } - return false -} - -// These keywords introduce uninterpreted Python blocks. -var prefixes = []string{ - "assert", - "class", - "def", - "del", - "for", - "if", - "try", - "else", - "elif", - "except", -} - -// hasPrefixSpace reports whether p begins with pre followed by a space or colon. -func hasPrefixSpace(p []byte, pre string) bool { - - if len(p) <= len(pre) || p[len(pre)] != ' ' && p[len(pre)] != '\t' && p[len(pre)] != ':' { - return false - } - for i := range pre { - if p[i] != pre[i] { - return false - } - } - return true -} - -// A utility function for the legacy formatter. -// Returns whether a given code starts with a top-level statement (maybe with some preceeding -// comments and blank lines) -func isOutsideBlock(b []byte) bool { - isBlankLine := true - isComment := false - for _, c := range b { - switch { - case c == ' ' || c == '\t' || c == '\r': - isBlankLine = false - case c == '#': - isBlankLine = false - isComment = true - case c == '\n': - isBlankLine = true - isComment = false - default: - if !isComment { - return isBlankLine - } - } - } - return true -} - -// skipStmt returns the data remaining after the statement beginning at p. -// It does not advance the input position. -// (The only reason for the input receiver is to be able to call in.Error.) -func (in *input) skipStmt(p []byte) []byte { - quote := byte(0) // if non-zero, the kind of quote we're in - tripleQuote := false // if true, the quote is a triple quote - depth := 0 // nesting depth for ( ) [ ] { } - var rest []byte // data after the Python block - - defer func() { - if quote != 0 { - in.Error("EOF scanning Python quoted string") - } - }() - - // Scan over input one byte at a time until we find - // an unindented, non-blank, non-comment line - // outside quoted strings and brackets. - for i := 0; i < len(p); i++ { - c := p[i] - if quote != 0 && c == quote && !tripleQuote { - quote = 0 - continue - } - if quote != 0 && c == quote && tripleQuote && i+2 < len(p) && p[i+1] == quote && p[i+2] == quote { - i += 2 - quote = 0 - tripleQuote = false - continue - } - if quote != 0 { - if c == '\\' { - i++ // skip escaped char - } - continue - } - if c == '\'' || c == '"' { - if i+2 < len(p) && p[i+1] == c && p[i+2] == c { - quote = c - tripleQuote = true - i += 2 - continue - } - quote = c - continue - } - - if depth == 0 && i > 0 && p[i] == '\n' && p[i-1] != '\\' { - // Possible stopping point. Save the earliest one we find. - if rest == nil { - rest = p[i:] - } - - if tables.FormatBzlFiles { - // In the bzl files mode we only care about the end of the statement, we've found it. - return rest - } - // In the legacy mode we need to find where the current block ends - if isOutsideBlock(p[i+1:]) { - return rest - } - // Not a stopping point after all. - rest = nil - - } - - switch c { - case '#': - // Skip comment. 
- for i < len(p) && p[i] != '\n' { - i++ - } - // Rewind 1 position back because \n should be handled at the next iteration - i-- - - case '(', '[', '{': - depth++ - - case ')', ']', '}': - depth-- - } - } - return rest -} - -// Comment assignment. -// We build two lists of all subexpressions, preorder and postorder. -// The preorder list is ordered by start location, with outer expressions first. -// The postorder list is ordered by end location, with outer expressions last. -// We use the preorder list to assign each whole-line comment to the syntax -// immediately following it, and we use the postorder list to assign each -// end-of-line comment to the syntax immediately preceding it. - -// order walks the expression adding it and its subexpressions to the -// preorder and postorder lists. -func (in *input) order(v Expr) { - if v != nil { - in.pre = append(in.pre, v) - } - switch v := v.(type) { - default: - panic(fmt.Errorf("order: unexpected type %T", v)) - case nil: - // nothing - case *End: - // nothing - case *File: - for _, stmt := range v.Stmt { - in.order(stmt) - } - case *CommentBlock: - // nothing - case *CallExpr: - in.order(v.X) - for _, x := range v.List { - in.order(x) - } - in.order(&v.End) - case *PythonBlock: - // nothing - case *LiteralExpr: - // nothing - case *StringExpr: - // nothing - case *DotExpr: - in.order(v.X) - case *ListExpr: - for _, x := range v.List { - in.order(x) - } - in.order(&v.End) - case *ListForExpr: - in.order(v.X) - for _, c := range v.For { - in.order(c) - } - in.order(&v.End) - case *SetExpr: - for _, x := range v.List { - in.order(x) - } - in.order(&v.End) - case *ForClauseWithIfClausesOpt: - in.order(v.For) - for _, c := range v.Ifs { - in.order(c) - } - case *ForClause: - for _, name := range v.Var { - in.order(name) - } - in.order(v.Expr) - case *IfClause: - in.order(v.Cond) - case *KeyValueExpr: - in.order(v.Key) - in.order(v.Value) - case *DictExpr: - for _, x := range v.List { - in.order(x) - } - in.order(&v.End) - case *TupleExpr: - for _, x := range v.List { - in.order(x) - } - in.order(&v.End) - case *UnaryExpr: - in.order(v.X) - case *BinaryExpr: - in.order(v.X) - in.order(v.Y) - case *ConditionalExpr: - in.order(v.Then) - in.order(v.Test) - in.order(v.Else) - case *ParenExpr: - in.order(v.X) - in.order(&v.End) - case *SliceExpr: - in.order(v.X) - in.order(v.From) - in.order(v.To) - in.order(v.Step) - case *IndexExpr: - in.order(v.X) - in.order(v.Y) - case *LambdaExpr: - for _, name := range v.Var { - in.order(name) - } - in.order(v.Expr) - case *ReturnExpr: - if v.X != nil { - in.order(v.X) - } - case *FuncDef: - for _, x := range v.Args { - in.order(x) - } - for _, x := range v.Body.Statements { - in.order(x) - } - case *ForLoop: - for _, x := range v.LoopVars { - in.order(x) - } - in.order(v.Iterable) - for _, x := range v.Body.Statements { - in.order(x) - } - case *IfElse: - for _, condition := range v.Conditions { - in.order(condition.If) - for _, x := range condition.Then.Statements { - in.order(x) - } - } - } - if v != nil { - in.post = append(in.post, v) - } -} - -// assignComments attaches comments to nearby syntax. -func (in *input) assignComments() { - // Generate preorder and postorder lists. - in.order(in.file) - - // Assign line comments to syntax immediately following. 
- line := in.lineComments - for _, x := range in.pre { - start, _ := x.Span() - xcom := x.Comment() - for len(line) > 0 && start.Byte >= line[0].Start.Byte { - xcom.Before = append(xcom.Before, line[0]) - line = line[1:] - } - } - - // Remaining line comments go at end of file. - in.file.After = append(in.file.After, line...) - - // Assign suffix comments to syntax immediately before. - suffix := in.suffixComments - for i := len(in.post) - 1; i >= 0; i-- { - x := in.post[i] - - // Do not assign suffix comments to file - switch x.(type) { - case *File: - continue - } - - _, end := x.Span() - xcom := x.Comment() - for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte { - xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1]) - suffix = suffix[:len(suffix)-1] - } - } - - // We assigned suffix comments in reverse. - // If multiple suffix comments were appended to the same - // expression node, they are now in reverse. Fix that. - for _, x := range in.post { - reverseComments(x.Comment().Suffix) - } - - // Remaining suffix comments go at beginning of file. - in.file.Before = append(in.file.Before, suffix...) -} - -// reverseComments reverses the []Comment list. -func reverseComments(list []Comment) { - for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 { - list[i], list[j] = list[j], list[i] - } -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/parse.y.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/parse.y.go deleted file mode 100755 index 55a59ea80d..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/parse.y.go +++ /dev/null @@ -1,1531 +0,0 @@ -//line build/parse.y:13 -package build - -import __yyfmt__ "fmt" - -//line build/parse.y:13 -//line build/parse.y:18 -type yySymType struct { - yys int - // input tokens - tok string // raw input syntax - str string // decoding of quoted string - pos Position // position of token - triple bool // was string triple quoted? 
- - // partial syntax trees - expr Expr - exprs []Expr - forc *ForClause - ifs []*IfClause - forifs *ForClauseWithIfClausesOpt - forsifs []*ForClauseWithIfClausesOpt - string *StringExpr - strings []*StringExpr - block CodeBlock - - // supporting information - comma Position // position of trailing comma in list, if present - lastRule Expr // most recent rule, to attach line comments to -} - -const _AUGM = 57346 -const _AND = 57347 -const _COMMENT = 57348 -const _EOF = 57349 -const _EQ = 57350 -const _FOR = 57351 -const _GE = 57352 -const _IDENT = 57353 -const _IF = 57354 -const _ELSE = 57355 -const _ELIF = 57356 -const _IN = 57357 -const _IS = 57358 -const _LAMBDA = 57359 -const _LOAD = 57360 -const _LE = 57361 -const _NE = 57362 -const _NOT = 57363 -const _OR = 57364 -const _PYTHON = 57365 -const _STRING = 57366 -const _DEF = 57367 -const _RETURN = 57368 -const _INDENT = 57369 -const _UNINDENT = 57370 -const ShiftInstead = 57371 -const _ASSERT = 57372 -const _UNARY = 57373 - -var yyToknames = [...]string{ - "$end", - "error", - "$unk", - "'%'", - "'('", - "')'", - "'*'", - "'+'", - "','", - "'-'", - "'.'", - "'/'", - "':'", - "'<'", - "'='", - "'>'", - "'['", - "']'", - "'{'", - "'}'", - "_AUGM", - "_AND", - "_COMMENT", - "_EOF", - "_EQ", - "_FOR", - "_GE", - "_IDENT", - "_IF", - "_ELSE", - "_ELIF", - "_IN", - "_IS", - "_LAMBDA", - "_LOAD", - "_LE", - "_NE", - "_NOT", - "_OR", - "_PYTHON", - "_STRING", - "_DEF", - "_RETURN", - "_INDENT", - "_UNINDENT", - "ShiftInstead", - "'\\n'", - "_ASSERT", - "_UNARY", - "';'", -} -var yyStatenames = [...]string{} - -const yyEofCode = 1 -const yyErrCode = 2 -const yyInitialStackSize = 16 - -//line build/parse.y:738 - -// Go helper code. - -// unary returns a unary expression with the given -// position, operator, and subexpression. -func unary(pos Position, op string, x Expr) Expr { - return &UnaryExpr{ - OpStart: pos, - Op: op, - X: x, - } -} - -// binary returns a binary expression with the given -// operands, position, and operator. -func binary(x Expr, pos Position, op string, y Expr) Expr { - _, xend := x.Span() - ystart, _ := y.Span() - return &BinaryExpr{ - X: x, - OpStart: pos, - Op: op, - LineBreak: xend.Line < ystart.Line, - Y: y, - } -} - -// isSimpleExpression returns whether an expression is simple and allowed to exist in -// compact forms of sequences. -// The formal criteria are the following: an expression is considered simple if it's -// a literal (variable, string or a number), a literal with a unary operator or an empty sequence. -func isSimpleExpression(expr *Expr) bool { - switch x := (*expr).(type) { - case *LiteralExpr, *StringExpr: - return true - case *UnaryExpr: - _, ok := x.X.(*LiteralExpr) - return ok - case *ListExpr: - return len(x.List) == 0 - case *TupleExpr: - return len(x.List) == 0 - case *DictExpr: - return len(x.List) == 0 - case *SetExpr: - return len(x.List) == 0 - default: - return false - } -} - -// forceCompact returns the setting for the ForceCompact field for a call or tuple. -// -// NOTE 1: The field is called ForceCompact, not ForceSingleLine, -// because it only affects the formatting associated with the call or tuple syntax, -// not the formatting of the arguments. For example: -// -// call([ -// 1, -// 2, -// 3, -// ]) -// -// is still a compact call even though it runs on multiple lines. -// -// In contrast the multiline form puts a linebreak after the (. 
-// -// call( -// [ -// 1, -// 2, -// 3, -// ], -// ) -// -// NOTE 2: Because of NOTE 1, we cannot use start and end on the -// same line as a signal for compact mode: the formatting of an -// embedded list might move the end to a different line, which would -// then look different on rereading and cause buildifier not to be -// idempotent. Instead, we have to look at properties guaranteed -// to be preserved by the reformatting, namely that the opening -// paren and the first expression are on the same line and that -// each subsequent expression begins on the same line as the last -// one ended (no line breaks after comma). -func forceCompact(start Position, list []Expr, end Position) bool { - if len(list) <= 1 { - // The call or tuple will probably be compact anyway; don't force it. - return false - } - - // If there are any named arguments or non-string, non-literal - // arguments, cannot force compact mode. - line := start.Line - for _, x := range list { - start, end := x.Span() - if start.Line != line { - return false - } - line = end.Line - if !isSimpleExpression(&x) { - return false - } - } - return end.Line == line -} - -// forceMultiLine returns the setting for the ForceMultiLine field. -func forceMultiLine(start Position, list []Expr, end Position) bool { - if len(list) > 1 { - // The call will be multiline anyway, because it has multiple elements. Don't force it. - return false - } - - if len(list) == 0 { - // Empty list: use position of brackets. - return start.Line != end.Line - } - - // Single-element list. - // Check whether opening bracket is on different line than beginning of - // element, or closing bracket is on different line than end of element. - elemStart, elemEnd := list[0].Span() - return start.Line != elemStart.Line || end.Line != elemEnd.Line -} - -//line yacctab:1 -var yyExca = [...]int{ - -1, 1, - 1, -1, - -2, 0, -} - -const yyPrivate = 57344 - -const yyLast = 739 - -var yyAct = [...]int{ - - 13, 112, 136, 2, 138, 74, 17, 7, 118, 69, - 34, 9, 117, 81, 130, 58, 31, 59, 35, 64, - 65, 66, 161, 30, 102, 70, 72, 77, 37, 38, - 166, 84, 108, 33, 79, 73, 76, 85, 84, 120, - 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 98, 99, 100, 101, 167, 103, 104, 105, 106, 86, - 163, 83, 110, 111, 154, 149, 153, 129, 64, 127, - 120, 109, 29, 120, 178, 87, 25, 115, 20, 126, - 120, 27, 116, 64, 133, 123, 120, 125, 24, 172, - 26, 134, 132, 131, 171, 61, 68, 71, 114, 28, - 168, 60, 113, 139, 145, 18, 22, 62, 121, 19, - 141, 158, 30, 148, 146, 147, 63, 40, 142, 82, - 39, 124, 147, 143, 67, 41, 150, 35, 80, 155, - 157, 152, 150, 36, 150, 156, 40, 1, 160, 39, - 42, 162, 43, 23, 41, 78, 165, 164, 75, 32, - 12, 8, 150, 4, 151, 21, 119, 122, 25, 0, - 20, 0, 169, 27, 0, 170, 0, 173, 174, 0, - 24, 175, 26, 165, 177, 7, 6, 0, 25, 11, - 0, 28, 16, 27, 0, 0, 0, 18, 22, 0, - 24, 19, 26, 15, 30, 10, 14, 25, 176, 20, - 5, 28, 27, 0, 0, 0, 0, 0, 22, 24, - 0, 26, 0, 0, 30, 6, 3, 25, 11, 20, - 28, 16, 27, 0, 0, 0, 18, 22, 0, 24, - 19, 26, 15, 30, 10, 14, 0, 0, 0, 5, - 28, 0, 0, 0, 0, 0, 18, 22, 0, 0, - 19, 0, 15, 30, 40, 14, 0, 39, 42, 137, - 43, 0, 41, 128, 44, 50, 45, 0, 0, 0, - 0, 51, 55, 0, 0, 46, 0, 49, 0, 57, - 0, 0, 52, 56, 0, 0, 47, 48, 53, 54, - 40, 0, 0, 39, 42, 0, 43, 0, 41, 159, - 44, 50, 45, 0, 0, 0, 0, 51, 55, 0, - 0, 46, 0, 49, 0, 57, 0, 0, 52, 56, - 0, 0, 47, 48, 53, 54, 40, 0, 0, 39, - 42, 0, 43, 0, 41, 0, 44, 50, 45, 0, - 144, 0, 0, 51, 55, 0, 0, 46, 0, 49, - 0, 57, 0, 0, 52, 56, 0, 0, 47, 48, - 53, 54, 40, 0, 0, 39, 42, 0, 43, 0, - 41, 0, 44, 50, 45, 0, 0, 0, 0, 51, - 55, 0, 0, 46, 
120, 49, 0, 57, 0, 0, - 52, 56, 0, 0, 47, 48, 53, 54, 40, 0, - 0, 39, 42, 0, 43, 0, 41, 0, 44, 50, - 45, 0, 0, 0, 0, 51, 55, 0, 0, 46, - 0, 49, 0, 57, 140, 0, 52, 56, 0, 0, - 47, 48, 53, 54, 40, 0, 0, 39, 42, 0, - 43, 0, 41, 135, 44, 50, 45, 0, 0, 0, - 0, 51, 55, 0, 0, 46, 0, 49, 0, 57, - 0, 0, 52, 56, 0, 0, 47, 48, 53, 54, - 40, 0, 0, 39, 42, 0, 43, 0, 41, 107, - 44, 50, 45, 0, 0, 0, 0, 51, 55, 0, - 0, 46, 0, 49, 0, 57, 0, 0, 52, 56, - 0, 0, 47, 48, 53, 54, 40, 0, 0, 39, - 42, 0, 43, 0, 41, 0, 44, 50, 45, 0, - 0, 0, 0, 51, 55, 0, 0, 46, 0, 49, - 0, 57, 0, 0, 52, 56, 0, 0, 47, 48, - 53, 54, 40, 0, 0, 39, 42, 0, 43, 0, - 41, 0, 44, 50, 45, 0, 0, 0, 0, 51, - 55, 0, 0, 46, 0, 49, 0, 0, 0, 0, - 52, 56, 0, 0, 47, 48, 53, 54, 40, 0, - 0, 39, 42, 0, 43, 0, 41, 0, 44, 0, - 45, 0, 0, 0, 0, 0, 55, 0, 0, 46, - 0, 49, 0, 57, 0, 0, 52, 56, 0, 0, - 47, 48, 53, 54, 40, 0, 0, 39, 42, 0, - 43, 0, 41, 0, 44, 0, 45, 0, 0, 0, - 0, 0, 55, 0, 0, 46, 0, 49, 0, 25, - 0, 20, 52, 56, 27, 0, 47, 48, 53, 54, - 0, 24, 0, 26, 0, 40, 0, 0, 39, 42, - 0, 43, 28, 41, 0, 44, 0, 45, 18, 22, - 0, 0, 19, 55, 15, 30, 46, 14, 49, 0, - 0, 0, 0, 0, 0, 0, 0, 47, 48, 40, - 54, 0, 39, 42, 0, 43, 0, 41, 0, 44, - 0, 45, 0, 0, 0, 40, 0, 55, 39, 42, - 46, 43, 49, 41, 0, 44, 0, 45, 0, 0, - 0, 47, 48, 0, 0, 0, 46, 0, 49, 0, - 0, 0, 0, 0, 0, 0, 0, 47, 48, -} -var yyPact = [...]int{ - - -1000, -1000, 192, -1000, -1000, -1000, -31, -1000, -1000, -1000, - 5, 173, -2, 502, 71, -1000, 71, 90, 71, 71, - 71, -1000, 119, -18, 71, 71, 71, 173, -1000, -1000, - -1000, -1000, -37, 114, 29, 90, 71, 46, -1000, 71, - 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, - 71, 71, 71, -8, 71, 71, 71, 71, 502, 466, - 4, 71, 71, 89, 502, -1000, -1000, 71, -1000, 64, - 358, 99, 358, 115, 13, 59, 49, 250, 58, -1000, - -33, 634, 71, 71, 173, 430, 212, -1000, -1000, -1000, - -1000, 113, 113, 132, 132, 132, 132, 132, 132, 574, - 574, 651, 71, 685, 701, 651, 394, 212, -1000, 112, - 358, 322, 91, 71, 71, 107, -1000, 47, -1000, -1000, - 173, 71, -1000, 60, -1000, 44, -1000, -1000, 71, 71, - -1000, -1000, 105, 286, 90, 212, -1000, -22, -1000, 651, - 71, -1000, -1000, 54, -1000, 71, 610, 502, -1000, -1000, - -1000, 1, 22, -1000, -1000, 502, -1000, 250, 87, 212, - -1000, -1000, 610, -1000, 76, 502, 71, 71, 212, -1000, - 153, -1000, 71, 538, 538, -1000, -1000, 56, -1000, -} -var yyPgo = [...]int{ - - 0, 157, 0, 1, 6, 97, 9, 10, 156, 8, - 12, 155, 154, 3, 153, 151, 150, 4, 11, 149, - 5, 148, 145, 72, 143, 2, 137, 133, 128, -} -var yyR1 = [...]int{ - - 0, 26, 25, 25, 13, 13, 13, 13, 14, 14, - 15, 15, 15, 16, 16, 16, 27, 27, 17, 19, - 19, 18, 18, 18, 18, 28, 28, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, - 2, 2, 2, 2, 2, 2, 2, 3, 3, 1, - 1, 20, 22, 22, 21, 21, 5, 5, 6, 6, - 7, 7, 23, 24, 24, 11, 8, 9, 10, 10, - 12, 12, -} -var yyR2 = [...]int{ - - 0, 2, 4, 1, 0, 2, 2, 3, 1, 1, - 7, 6, 1, 4, 5, 4, 2, 1, 4, 0, - 3, 1, 2, 1, 1, 0, 1, 1, 3, 4, - 4, 4, 6, 8, 5, 1, 3, 4, 4, 4, - 3, 3, 3, 2, 1, 4, 2, 2, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 4, 3, 3, 3, 5, 0, 1, 0, - 1, 3, 1, 3, 1, 2, 1, 3, 0, 2, - 1, 3, 1, 1, 2, 1, 4, 2, 1, 2, - 0, 3, -} -var yyChk = [...]int{ - - -1000, -26, -13, 24, -14, 47, 23, -17, -15, -18, - 42, 26, -16, -2, 43, 40, 29, -4, 34, 38, - 7, -11, 35, -24, 17, 5, 19, 10, 28, -23, - 41, 47, -19, 28, -7, -4, -27, 30, 31, 7, - 4, 12, 8, 10, 14, 16, 25, 36, 37, 27, - 15, 21, 32, 38, 39, 22, 33, 29, -2, -2, - 11, 5, 17, -5, -2, -2, -2, 5, -23, -6, - -2, -5, -2, -6, -20, -21, -6, -2, -22, -4, - -28, 50, 5, 32, 9, 
-2, 13, 29, -2, -2, - -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 32, -2, -2, -2, -2, 13, 28, -6, - -2, -2, -3, 13, 9, -6, 18, -10, -9, -8, - 26, 9, -1, -10, 6, -10, 20, 20, 13, 9, - 47, -18, -6, -2, -4, 13, -25, 47, -17, -2, - 30, -25, 6, -10, 18, 13, -2, -2, 6, 18, - -9, -12, -7, 6, 20, -2, -20, -2, 6, 13, - -25, 44, -2, 6, -3, -2, 29, 32, 13, -25, - -13, 18, 13, -2, -2, -25, 45, -3, 18, -} -var yyDef = [...]int{ - - 4, -2, 0, 1, 5, 6, 0, 8, 9, 19, - 0, 0, 12, 21, 23, 24, 0, 44, 0, 0, - 0, 27, 0, 35, 78, 78, 78, 0, 85, 83, - 82, 7, 25, 0, 0, 80, 0, 0, 17, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 22, 0, - 0, 78, 67, 0, 76, 46, 47, 78, 84, 0, - 76, 69, 76, 0, 72, 0, 0, 76, 74, 43, - 0, 26, 78, 0, 0, 0, 0, 16, 48, 49, - 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 0, 63, 64, 65, 0, 0, 28, 0, - 76, 68, 0, 0, 0, 0, 36, 0, 88, 90, - 0, 70, 79, 0, 42, 0, 40, 41, 0, 75, - 18, 20, 0, 0, 81, 0, 15, 0, 3, 62, - 0, 13, 30, 0, 31, 67, 45, 77, 29, 37, - 89, 87, 0, 38, 39, 71, 73, 0, 0, 0, - 14, 4, 66, 34, 0, 68, 0, 0, 0, 11, - 0, 32, 67, 91, 86, 10, 2, 0, 33, -} -var yyTok1 = [...]int{ - - 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 47, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, - 5, 6, 7, 8, 9, 10, 11, 12, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 13, 50, - 14, 15, 16, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 17, 3, 18, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 19, 3, 20, -} -var yyTok2 = [...]int{ - - 2, 3, 21, 22, 23, 24, 25, 26, 27, 28, - 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, -} -var yyTok3 = [...]int{ - 0, -} - -var yyErrorMessages = [...]struct { - state int - token int - msg string -}{} - -//line yaccpar:1 - -/* parser for yacc output */ - -var ( - yyDebug = 0 - yyErrorVerbose = false -) - -type yyLexer interface { - Lex(lval *yySymType) int - Error(s string) -} - -type yyParser interface { - Parse(yyLexer) int - Lookahead() int -} - -type yyParserImpl struct { - lval yySymType - stack [yyInitialStackSize]yySymType - char int -} - -func (p *yyParserImpl) Lookahead() int { - return p.char -} - -func yyNewParser() yyParser { - return &yyParserImpl{} -} - -const yyFlag = -1000 - -func yyTokname(c int) string { - if c >= 1 && c-1 < len(yyToknames) { - if yyToknames[c-1] != "" { - return yyToknames[c-1] - } - } - return __yyfmt__.Sprintf("tok-%v", c) -} - -func yyStatname(s int) string { - if s >= 0 && s < len(yyStatenames) { - if yyStatenames[s] != "" { - return yyStatenames[s] - } - } - return __yyfmt__.Sprintf("state-%v", s) -} - -func yyErrorMessage(state, lookAhead int) string { - const TOKSTART = 4 - - if !yyErrorVerbose { - return "syntax error" - } - - for _, e := range yyErrorMessages { - if e.state == state && e.token == lookAhead { - return "syntax error: " + e.msg - } - } - - res := "syntax error: unexpected " + yyTokname(lookAhead) - - // To match Bison, suggest at most four expected tokens. - expected := make([]int, 0, 4) - - // Look for shiftable tokens. - base := yyPact[state] - for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { - if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { - if len(expected) == cap(expected) { - return res - } - expected = append(expected, tok) - } - } - - if yyDef[state] == -2 { - i := 0 - for yyExca[i] != -1 || yyExca[i+1] != state { - i += 2 - } - - // Look for tokens that we accept or reduce. 
- for i += 2; yyExca[i] >= 0; i += 2 { - tok := yyExca[i] - if tok < TOKSTART || yyExca[i+1] == 0 { - continue - } - if len(expected) == cap(expected) { - return res - } - expected = append(expected, tok) - } - - // If the default action is to accept or reduce, give up. - if yyExca[i+1] != 0 { - return res - } - } - - for i, tok := range expected { - if i == 0 { - res += ", expecting " - } else { - res += " or " - } - res += yyTokname(tok) - } - return res -} - -func yylex1(lex yyLexer, lval *yySymType) (char, token int) { - token = 0 - char = lex.Lex(lval) - if char <= 0 { - token = yyTok1[0] - goto out - } - if char < len(yyTok1) { - token = yyTok1[char] - goto out - } - if char >= yyPrivate { - if char < yyPrivate+len(yyTok2) { - token = yyTok2[char-yyPrivate] - goto out - } - } - for i := 0; i < len(yyTok3); i += 2 { - token = yyTok3[i+0] - if token == char { - token = yyTok3[i+1] - goto out - } - } - -out: - if token == 0 { - token = yyTok2[1] /* unknown char */ - } - if yyDebug >= 3 { - __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) - } - return char, token -} - -func yyParse(yylex yyLexer) int { - return yyNewParser().Parse(yylex) -} - -func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int { - var yyn int - var yyVAL yySymType - var yyDollar []yySymType - _ = yyDollar // silence set and not used - yyS := yyrcvr.stack[:] - - Nerrs := 0 /* number of errors */ - Errflag := 0 /* error recovery flag */ - yystate := 0 - yyrcvr.char = -1 - yytoken := -1 // yyrcvr.char translated into internal numbering - defer func() { - // Make sure we report no lookahead when not parsing. - yystate = -1 - yyrcvr.char = -1 - yytoken = -1 - }() - yyp := -1 - goto yystack - -ret0: - return 0 - -ret1: - return 1 - -yystack: - /* put a state and value onto the stack */ - if yyDebug >= 4 { - __yyfmt__.Printf("char %v in %v\n", yyTokname(yytoken), yyStatname(yystate)) - } - - yyp++ - if yyp >= len(yyS) { - nyys := make([]yySymType, len(yyS)*2) - copy(nyys, yyS) - yyS = nyys - } - yyS[yyp] = yyVAL - yyS[yyp].yys = yystate - -yynewstate: - yyn = yyPact[yystate] - if yyn <= yyFlag { - goto yydefault /* simple state */ - } - if yyrcvr.char < 0 { - yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) - } - yyn += yytoken - if yyn < 0 || yyn >= yyLast { - goto yydefault - } - yyn = yyAct[yyn] - if yyChk[yyn] == yytoken { /* valid shift */ - yyrcvr.char = -1 - yytoken = -1 - yyVAL = yyrcvr.lval - yystate = yyn - if Errflag > 0 { - Errflag-- - } - goto yystack - } - -yydefault: - /* default state action */ - yyn = yyDef[yystate] - if yyn == -2 { - if yyrcvr.char < 0 { - yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) - } - - /* look through exception table */ - xi := 0 - for { - if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { - break - } - xi += 2 - } - for xi += 2; ; xi += 2 { - yyn = yyExca[xi+0] - if yyn < 0 || yyn == yytoken { - break - } - } - yyn = yyExca[xi+1] - if yyn < 0 { - goto ret0 - } - } - if yyn == 0 { - /* error ... attempt to resume parsing */ - switch Errflag { - case 0: /* brand new error */ - yylex.Error(yyErrorMessage(yystate, yytoken)) - Nerrs++ - if yyDebug >= 1 { - __yyfmt__.Printf("%s", yyStatname(yystate)) - __yyfmt__.Printf(" saw %s\n", yyTokname(yytoken)) - } - fallthrough - - case 1, 2: /* incompletely recovered error ... 
try again */ - Errflag = 3 - - /* find a state where "error" is a legal shift action */ - for yyp >= 0 { - yyn = yyPact[yyS[yyp].yys] + yyErrCode - if yyn >= 0 && yyn < yyLast { - yystate = yyAct[yyn] /* simulate a shift of "error" */ - if yyChk[yystate] == yyErrCode { - goto yystack - } - } - - /* the current p has no shift on "error", pop stack */ - if yyDebug >= 2 { - __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys) - } - yyp-- - } - /* there is no state on the stack with an error shift ... abort */ - goto ret1 - - case 3: /* no shift yet; clobber input char */ - if yyDebug >= 2 { - __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yytoken)) - } - if yytoken == yyEofCode { - goto ret1 - } - yyrcvr.char = -1 - yytoken = -1 - goto yynewstate /* try again in the same state */ - } - } - - /* reduction by production yyn */ - if yyDebug >= 2 { - __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate)) - } - - yynt := yyn - yypt := yyp - _ = yypt // guard against "declared and not used" - - yyp -= yyR2[yyn] - // yyp is now the index of $0. Perform the default action. Iff the - // reduced production is ε, $1 is possibly out of range. - if yyp+1 >= len(yyS) { - nyys := make([]yySymType, len(yyS)*2) - copy(nyys, yyS) - yyS = nyys - } - yyVAL = yyS[yyp+1] - - /* consult goto table to find next state */ - yyn = yyR1[yyn] - yyg := yyPgo[yyn] - yyj := yyg + yyS[yyp].yys + 1 - - if yyj >= yyLast { - yystate = yyAct[yyg] - } else { - yystate = yyAct[yyj] - if yyChk[yystate] != -yyn { - yystate = yyAct[yyg] - } - } - // dummy call; replaced with literal code - switch yynt { - - case 1: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:167 - { - yylex.(*input).file = &File{Stmt: yyDollar[1].exprs} - return 0 - } - case 2: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:174 - { - yyVAL.block = CodeBlock{ - Start: yyDollar[2].pos, - Statements: yyDollar[3].exprs, - End: End{Pos: yyDollar[4].pos}, - } - } - case 3: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:182 - { - // simple_stmt is never empty - start, _ := yyDollar[1].exprs[0].Span() - _, end := yyDollar[1].exprs[len(yyDollar[1].exprs)-1].Span() - yyVAL.block = CodeBlock{ - Start: start, - Statements: yyDollar[1].exprs, - End: End{Pos: end}, - } - } - case 4: - yyDollar = yyS[yypt-0 : yypt+1] - //line build/parse.y:194 - { - yyVAL.exprs = nil - yyVAL.lastRule = nil - } - case 5: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:199 - { - // If this statement follows a comment block, - // attach the comments to the statement. - if cb, ok := yyDollar[1].lastRule.(*CommentBlock); ok { - yyVAL.exprs = append(yyDollar[1].exprs[:len(yyDollar[1].exprs)-1], yyDollar[2].exprs...) - yyDollar[2].exprs[0].Comment().Before = cb.After - yyVAL.lastRule = yyDollar[2].exprs[len(yyDollar[2].exprs)-1] - break - } - - // Otherwise add to list. - yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[2].exprs...) - yyVAL.lastRule = yyDollar[2].exprs[len(yyDollar[2].exprs)-1] - - // Consider this input: - // - // foo() - // # bar - // baz() - // - // If we've just parsed baz(), the # bar is attached to - // foo() as an After comment. Make it a Before comment - // for baz() instead. - if x := yyDollar[1].lastRule; x != nil { - com := x.Comment() - // stmt is never empty - yyDollar[2].exprs[0].Comment().Before = com.After - com.After = nil - } - } - case 6: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:230 - { - // Blank line; sever last rule from future comments. 
- yyVAL.exprs = yyDollar[1].exprs - yyVAL.lastRule = nil - } - case 7: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:236 - { - yyVAL.exprs = yyDollar[1].exprs - yyVAL.lastRule = yyDollar[1].lastRule - if yyVAL.lastRule == nil { - cb := &CommentBlock{Start: yyDollar[2].pos} - yyVAL.exprs = append(yyVAL.exprs, cb) - yyVAL.lastRule = cb - } - com := yyVAL.lastRule.Comment() - com.After = append(com.After, Comment{Start: yyDollar[2].pos, Token: yyDollar[2].tok}) - } - case 8: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:250 - { - yyVAL.exprs = yyDollar[1].exprs - } - case 9: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:254 - { - yyVAL.exprs = []Expr{yyDollar[1].expr} - } - case 10: - yyDollar = yyS[yypt-7 : yypt+1] - //line build/parse.y:260 - { - yyVAL.expr = &FuncDef{ - Start: yyDollar[1].pos, - Name: yyDollar[2].tok, - ListStart: yyDollar[3].pos, - Args: yyDollar[4].exprs, - Body: yyDollar[7].block, - End: yyDollar[7].block.End, - ForceCompact: forceCompact(yyDollar[3].pos, yyDollar[4].exprs, yyDollar[5].pos), - ForceMultiLine: forceMultiLine(yyDollar[3].pos, yyDollar[4].exprs, yyDollar[5].pos), - } - } - case 11: - yyDollar = yyS[yypt-6 : yypt+1] - //line build/parse.y:273 - { - yyVAL.expr = &ForLoop{ - Start: yyDollar[1].pos, - LoopVars: yyDollar[2].exprs, - Iterable: yyDollar[4].expr, - Body: yyDollar[6].block, - End: yyDollar[6].block.End, - } - } - case 12: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:283 - { - yyVAL.expr = yyDollar[1].expr - } - case 13: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:289 - { - yyVAL.expr = &IfElse{ - Start: yyDollar[1].pos, - Conditions: []Condition{ - Condition{ - If: yyDollar[2].expr, - Then: yyDollar[4].block, - }, - }, - End: yyDollar[4].block.End, - } - } - case 14: - yyDollar = yyS[yypt-5 : yypt+1] - //line build/parse.y:302 - { - block := yyDollar[1].expr.(*IfElse) - block.Conditions = append(block.Conditions, Condition{ - If: yyDollar[3].expr, - Then: yyDollar[5].block, - }) - block.End = yyDollar[5].block.End - yyVAL.expr = block - } - case 15: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:312 - { - block := yyDollar[1].expr.(*IfElse) - block.Conditions = append(block.Conditions, Condition{ - Then: yyDollar[4].block, - }) - block.End = yyDollar[4].block.End - yyVAL.expr = block - } - case 18: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:327 - { - yyVAL.exprs = append([]Expr{yyDollar[1].expr}, yyDollar[2].exprs...) 
- yyVAL.lastRule = yyVAL.exprs[len(yyVAL.exprs)-1] - } - case 19: - yyDollar = yyS[yypt-0 : yypt+1] - //line build/parse.y:333 - { - yyVAL.exprs = []Expr{} - } - case 20: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:337 - { - yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) - } - case 22: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:344 - { - _, end := yyDollar[2].expr.Span() - yyVAL.expr = &ReturnExpr{ - X: yyDollar[2].expr, - End: end, - } - } - case 23: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:352 - { - yyVAL.expr = &ReturnExpr{End: yyDollar[1].pos} - } - case 24: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:356 - { - yyVAL.expr = &PythonBlock{Start: yyDollar[1].pos, Token: yyDollar[1].tok} - } - case 28: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:366 - { - yyVAL.expr = &DotExpr{ - X: yyDollar[1].expr, - Dot: yyDollar[2].pos, - NamePos: yyDollar[3].pos, - Name: yyDollar[3].tok, - } - } - case 29: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:375 - { - yyVAL.expr = &CallExpr{ - X: &LiteralExpr{Start: yyDollar[1].pos, Token: "load"}, - ListStart: yyDollar[2].pos, - List: yyDollar[3].exprs, - End: End{Pos: yyDollar[4].pos}, - ForceCompact: forceCompact(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), - ForceMultiLine: forceMultiLine(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), - } - } - case 30: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:386 - { - yyVAL.expr = &CallExpr{ - X: yyDollar[1].expr, - ListStart: yyDollar[2].pos, - List: yyDollar[3].exprs, - End: End{Pos: yyDollar[4].pos}, - ForceCompact: forceCompact(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), - ForceMultiLine: forceMultiLine(yyDollar[2].pos, yyDollar[3].exprs, yyDollar[4].pos), - } - } - case 31: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:397 - { - yyVAL.expr = &IndexExpr{ - X: yyDollar[1].expr, - IndexStart: yyDollar[2].pos, - Y: yyDollar[3].expr, - End: yyDollar[4].pos, - } - } - case 32: - yyDollar = yyS[yypt-6 : yypt+1] - //line build/parse.y:406 - { - yyVAL.expr = &SliceExpr{ - X: yyDollar[1].expr, - SliceStart: yyDollar[2].pos, - From: yyDollar[3].expr, - FirstColon: yyDollar[4].pos, - To: yyDollar[5].expr, - End: yyDollar[6].pos, - } - } - case 33: - yyDollar = yyS[yypt-8 : yypt+1] - //line build/parse.y:417 - { - yyVAL.expr = &SliceExpr{ - X: yyDollar[1].expr, - SliceStart: yyDollar[2].pos, - From: yyDollar[3].expr, - FirstColon: yyDollar[4].pos, - To: yyDollar[5].expr, - SecondColon: yyDollar[6].pos, - Step: yyDollar[7].expr, - End: yyDollar[8].pos, - } - } - case 34: - yyDollar = yyS[yypt-5 : yypt+1] - //line build/parse.y:430 - { - yyVAL.expr = &CallExpr{ - X: yyDollar[1].expr, - ListStart: yyDollar[2].pos, - List: []Expr{ - &ListForExpr{ - Brack: "", - Start: yyDollar[2].pos, - X: yyDollar[3].expr, - For: yyDollar[4].forsifs, - End: End{Pos: yyDollar[5].pos}, - }, - }, - End: End{Pos: yyDollar[5].pos}, - } - } - case 35: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:447 - { - if len(yyDollar[1].strings) == 1 { - yyVAL.expr = yyDollar[1].strings[0] - break - } - yyVAL.expr = yyDollar[1].strings[0] - for _, x := range yyDollar[1].strings[1:] { - _, end := yyVAL.expr.Span() - yyVAL.expr = binary(yyVAL.expr, end, "+", x) - } - } - case 36: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:459 - { - yyVAL.expr = &ListExpr{ - Start: yyDollar[1].pos, - List: yyDollar[2].exprs, - Comma: yyDollar[2].comma, - End: End{Pos: yyDollar[3].pos}, - ForceMultiLine: 
forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), - } - } - case 37: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:469 - { - exprStart, _ := yyDollar[2].expr.Span() - yyVAL.expr = &ListForExpr{ - Brack: "[]", - Start: yyDollar[1].pos, - X: yyDollar[2].expr, - For: yyDollar[3].forsifs, - End: End{Pos: yyDollar[4].pos}, - ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, - } - } - case 38: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:481 - { - exprStart, _ := yyDollar[2].expr.Span() - yyVAL.expr = &ListForExpr{ - Brack: "()", - Start: yyDollar[1].pos, - X: yyDollar[2].expr, - For: yyDollar[3].forsifs, - End: End{Pos: yyDollar[4].pos}, - ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, - } - } - case 39: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:493 - { - exprStart, _ := yyDollar[2].expr.Span() - yyVAL.expr = &ListForExpr{ - Brack: "{}", - Start: yyDollar[1].pos, - X: yyDollar[2].expr, - For: yyDollar[3].forsifs, - End: End{Pos: yyDollar[4].pos}, - ForceMultiLine: yyDollar[1].pos.Line != exprStart.Line, - } - } - case 40: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:505 - { - yyVAL.expr = &DictExpr{ - Start: yyDollar[1].pos, - List: yyDollar[2].exprs, - Comma: yyDollar[2].comma, - End: End{Pos: yyDollar[3].pos}, - ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), - } - } - case 41: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:515 - { - yyVAL.expr = &SetExpr{ - Start: yyDollar[1].pos, - List: yyDollar[2].exprs, - Comma: yyDollar[2].comma, - End: End{Pos: yyDollar[3].pos}, - ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), - } - } - case 42: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:525 - { - if len(yyDollar[2].exprs) == 1 && yyDollar[2].comma.Line == 0 { - // Just a parenthesized expression, not a tuple. 
- yyVAL.expr = &ParenExpr{ - Start: yyDollar[1].pos, - X: yyDollar[2].exprs[0], - End: End{Pos: yyDollar[3].pos}, - ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), - } - } else { - yyVAL.expr = &TupleExpr{ - Start: yyDollar[1].pos, - List: yyDollar[2].exprs, - Comma: yyDollar[2].comma, - End: End{Pos: yyDollar[3].pos}, - ForceCompact: forceCompact(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), - ForceMultiLine: forceMultiLine(yyDollar[1].pos, yyDollar[2].exprs, yyDollar[3].pos), - } - } - } - case 43: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:545 - { - yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) - } - case 45: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:550 - { - yyVAL.expr = &LambdaExpr{ - Lambda: yyDollar[1].pos, - Var: yyDollar[2].exprs, - Colon: yyDollar[3].pos, - Expr: yyDollar[4].expr, - } - } - case 46: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:558 - { - yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) - } - case 47: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:559 - { - yyVAL.expr = unary(yyDollar[1].pos, yyDollar[1].tok, yyDollar[2].expr) - } - case 48: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:560 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 49: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:561 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 50: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:562 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 51: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:563 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 52: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:564 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 53: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:565 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 54: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:566 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 55: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:567 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 56: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:568 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 57: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:569 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 58: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:570 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 59: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:571 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 60: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:572 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 61: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:573 - { - yyVAL.expr = 
binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 62: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:574 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, "not in", yyDollar[4].expr) - } - case 63: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:575 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 64: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:576 - { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - case 65: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:578 - { - if b, ok := yyDollar[3].expr.(*UnaryExpr); ok && b.Op == "not" { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, "is not", b.X) - } else { - yyVAL.expr = binary(yyDollar[1].expr, yyDollar[2].pos, yyDollar[2].tok, yyDollar[3].expr) - } - } - case 66: - yyDollar = yyS[yypt-5 : yypt+1] - //line build/parse.y:586 - { - yyVAL.expr = &ConditionalExpr{ - Then: yyDollar[1].expr, - IfStart: yyDollar[2].pos, - Test: yyDollar[3].expr, - ElseStart: yyDollar[4].pos, - Else: yyDollar[5].expr, - } - } - case 67: - yyDollar = yyS[yypt-0 : yypt+1] - //line build/parse.y:597 - { - yyVAL.expr = nil - } - case 69: - yyDollar = yyS[yypt-0 : yypt+1] - //line build/parse.y:607 - { - yyVAL.pos = Position{} - } - case 71: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:613 - { - yyVAL.expr = &KeyValueExpr{ - Key: yyDollar[1].expr, - Colon: yyDollar[2].pos, - Value: yyDollar[3].expr, - } - } - case 72: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:623 - { - yyVAL.exprs = []Expr{yyDollar[1].expr} - } - case 73: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:627 - { - yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) - } - case 74: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:633 - { - yyVAL.exprs = yyDollar[1].exprs - } - case 75: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:637 - { - yyVAL.exprs = yyDollar[1].exprs - } - case 76: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:643 - { - yyVAL.exprs = []Expr{yyDollar[1].expr} - } - case 77: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:647 - { - yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) - } - case 78: - yyDollar = yyS[yypt-0 : yypt+1] - //line build/parse.y:652 - { - yyVAL.exprs, yyVAL.comma = nil, Position{} - } - case 79: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:656 - { - yyVAL.exprs, yyVAL.comma = yyDollar[1].exprs, yyDollar[2].pos - } - case 80: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:662 - { - yyVAL.exprs = []Expr{yyDollar[1].expr} - } - case 81: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:666 - { - yyVAL.exprs = append(yyDollar[1].exprs, yyDollar[3].expr) - } - case 82: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:672 - { - yyVAL.string = &StringExpr{ - Start: yyDollar[1].pos, - Value: yyDollar[1].str, - TripleQuote: yyDollar[1].triple, - End: yyDollar[1].pos.add(yyDollar[1].tok), - Token: yyDollar[1].tok, - } - } - case 83: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:684 - { - yyVAL.strings = []*StringExpr{yyDollar[1].string} - } - case 84: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:688 - { - yyVAL.strings = append(yyDollar[1].strings, yyDollar[2].string) - } - case 85: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:694 - { - yyVAL.expr = &LiteralExpr{Start: yyDollar[1].pos, Token: yyDollar[1].tok} - } - 
case 86: - yyDollar = yyS[yypt-4 : yypt+1] - //line build/parse.y:700 - { - yyVAL.forc = &ForClause{ - For: yyDollar[1].pos, - Var: yyDollar[2].exprs, - In: yyDollar[3].pos, - Expr: yyDollar[4].expr, - } - } - case 87: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:710 - { - yyVAL.forifs = &ForClauseWithIfClausesOpt{ - For: yyDollar[1].forc, - Ifs: yyDollar[2].ifs, - } - } - case 88: - yyDollar = yyS[yypt-1 : yypt+1] - //line build/parse.y:719 - { - yyVAL.forsifs = []*ForClauseWithIfClausesOpt{yyDollar[1].forifs} - } - case 89: - yyDollar = yyS[yypt-2 : yypt+1] - //line build/parse.y:722 - { - yyVAL.forsifs = append(yyDollar[1].forsifs, yyDollar[2].forifs) - } - case 90: - yyDollar = yyS[yypt-0 : yypt+1] - //line build/parse.y:727 - { - yyVAL.ifs = nil - } - case 91: - yyDollar = yyS[yypt-3 : yypt+1] - //line build/parse.y:731 - { - yyVAL.ifs = append(yyDollar[1].ifs, &IfClause{ - If: yyDollar[2].pos, - Cond: yyDollar[3].expr, - }) - } - } - goto yystack /* stack new state and value */ -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/print.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/print.go deleted file mode 100644 index e4a0ecdd44..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/print.go +++ /dev/null @@ -1,719 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -// Printing of syntax trees. - -package build - -import ( - "bytes" - "fmt" - "strings" -) - -const nestedIndentation = 2 // Indentation of nested blocks -const listIndentation = 4 // Indentation of multiline expressions - -// Format returns the formatted form of the given BUILD file. -func Format(f *File) []byte { - pr := &printer{} - pr.file(f) - return pr.Bytes() -} - -// FormatString returns the string form of the given expression. -func FormatString(x Expr) string { - pr := &printer{} - switch x := x.(type) { - case *File: - pr.file(x) - default: - pr.expr(x, precLow) - } - return pr.String() -} - -// A printer collects the state during printing of a file or expression. -type printer struct { - bytes.Buffer // output buffer - comment []Comment // pending end-of-line comments - margin int // left margin (indent), a number of spaces - depth int // nesting depth inside ( ) [ ] { } -} - -// printf prints to the buffer. -func (p *printer) printf(format string, args ...interface{}) { - fmt.Fprintf(p, format, args...) -} - -// indent returns the position on the current line, in bytes, 0-indexed. -func (p *printer) indent() int { - b := p.Bytes() - n := 0 - for n < len(b) && b[len(b)-1-n] != '\n' { - n++ - } - return n -} - -// newline ends the current line, flushing end-of-line comments. -// It must only be called when printing a newline is known to be safe: -// when not inside an expression or when p.depth > 0. -// To break a line inside an expression that might not be enclosed -// in brackets of some kind, use breakline instead. 
-func (p *printer) newline() { - if len(p.comment) > 0 { - p.printf(" ") - for i, com := range p.comment { - if i > 0 { - p.trim() - p.printf("\n%*s", p.margin, "") - } - p.printf("%s", strings.TrimSpace(com.Token)) - } - p.comment = p.comment[:0] - } - - p.trim() - p.printf("\n%*s", p.margin, "") -} - -// breakline breaks the current line, inserting a continuation \ if needed. -// If no continuation \ is needed, breakline flushes end-of-line comments. -func (p *printer) breakline() { - if p.depth == 0 { - // Cannot have both final \ and comments. - p.printf(" \\\n%*s", p.margin, "") - return - } - - // Safe to use newline. - p.newline() -} - -// trim removes trailing spaces from the current line. -func (p *printer) trim() { - // Remove trailing space from line we're about to end. - b := p.Bytes() - n := len(b) - for n > 0 && b[n-1] == ' ' { - n-- - } - p.Truncate(n) -} - -// file formats the given file into the print buffer. -func (p *printer) file(f *File) { - for _, com := range f.Before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - p.statements(f.Stmt) - - for _, com := range f.After { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - // If the last expression is in an indented code block there can be spaces in the last line. - p.trim() -} - -func (p *printer) statements(stmts []Expr) { - for i, stmt := range stmts { - switch stmt := stmt.(type) { - case *CommentBlock: - // comments already handled - - case *PythonBlock: - for _, com := range stmt.Before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - p.printf("%s", stmt.Token) - p.newline() - - default: - p.expr(stmt, precLow) - - // Print an empty line break after the expression unless it's a code block. - // For a code block, the line break is generated by its last statement. - if !isCodeBlock(stmt) { - p.newline() - } - } - - for _, com := range stmt.Comment().After { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - if i+1 < len(stmts) && !compactStmt(stmt, stmts[i+1], p.margin == 0) { - p.newline() - } - } -} - -// compactStmt reports whether the pair of statements s1, s2 -// should be printed without an intervening blank line. -// We omit the blank line when both are subinclude statements -// and the second one has no leading comments. -func compactStmt(s1, s2 Expr, isTopLevel bool) bool { - if len(s2.Comment().Before) > 0 { - return false - } - - if isTopLevel { - return isCall(s1, "load") && isCall(s2, "load") - } else { - return !(isCodeBlock(s1) || isCodeBlock(s2)) - } -} - -// isCall reports whether x is a call to a function with the given name. -func isCall(x Expr, name string) bool { - c, ok := x.(*CallExpr) - if !ok { - return false - } - nam, ok := c.X.(*LiteralExpr) - if !ok { - return false - } - return nam.Token == name -} - -// isCodeBlock checks if the statement is a code block (def, if, for, etc.) -func isCodeBlock(x Expr) bool { - switch x.(type) { - case *FuncDef: - return true - case *ForLoop: - return true - case *IfElse: - return true - default: - return false - } -} - -// Expression formatting. - -// The expression formatter must introduce parentheses to force the -// meaning described by the parse tree. We preserve parentheses in the -// input, so extra parentheses are only needed if we have edited the tree. -// -// For example consider these expressions: -// (1) "x" "y" % foo -// (2) "x" + "y" % foo -// (3) "x" + ("y" % foo) -// (4) ("x" + "y") % foo -// When we parse (1), we represent the concatenation as an addition. 
-// However, if we print the addition back out without additional parens, -// as in (2), it has the same meaning as (3), which is not the original -// meaning. To preserve the original meaning we must add parens as in (4). -// -// To allow arbitrary rewrites to be formatted properly, we track full -// operator precedence while printing instead of just handling this one -// case of string concatenation. -// -// The precedences are assigned values low to high. A larger number -// binds tighter than a smaller number. All binary operators bind -// left-to-right. -const ( - precLow = iota - precAssign - precComma - precColon - precIn - precOr - precAnd - precCmp - precAdd - precMultiply - precSuffix - precUnary - precConcat -) - -// opPrec gives the precedence for operators found in a BinaryExpr. -var opPrec = map[string]int{ - "=": precAssign, - "+=": precAssign, - "-=": precAssign, - "*=": precAssign, - "/=": precAssign, - "//=": precAssign, - "%=": precAssign, - "or": precOr, - "and": precAnd, - "<": precCmp, - ">": precCmp, - "==": precCmp, - "!=": precCmp, - "<=": precCmp, - ">=": precCmp, - "+": precAdd, - "-": precAdd, - "*": precMultiply, - "/": precMultiply, - "//": precMultiply, - "%": precMultiply, -} - -// expr prints the expression v to the print buffer. -// The value outerPrec gives the precedence of the operator -// outside expr. If that operator binds tighter than v's operator, -// expr must introduce parentheses to preserve the meaning -// of the parse tree (see above). -func (p *printer) expr(v Expr, outerPrec int) { - // Emit line-comments preceding this expression. - // If we are in the middle of an expression but not inside ( ) [ ] { } - // then we cannot just break the line: we'd have to end it with a \. - // However, even then we can't emit line comments since that would - // end the expression. This is only a concern if we have rewritten - // the parse tree. If comments were okay before this expression in - // the original input they're still okay now, in the absense of rewrites. - // - // TODO(bazel-team): Check whether it is valid to emit comments right now, - // and if not, insert them earlier in the output instead, at the most - // recent \n not following a \ line. - if before := v.Comment().Before; len(before) > 0 { - // Want to print a line comment. - // Line comments must be at the current margin. - p.trim() - if p.indent() > 0 { - // There's other text on the line. Start a new line. - p.printf("\n") - } - // Re-indent to margin. - p.printf("%*s", p.margin, "") - for _, com := range before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - } - - // Do we introduce parentheses? - // The result depends on the kind of expression. - // Each expression type that might need parentheses - // calls addParen with its own precedence. - // If parentheses are necessary, addParen prints the - // opening parenthesis and sets parenthesized so that - // the code after the switch can print the closing one. - parenthesized := false - addParen := func(prec int) { - if prec < outerPrec { - p.printf("(") - p.depth++ - parenthesized = true - } - } - - switch v := v.(type) { - default: - panic(fmt.Errorf("printer: unexpected type %T", v)) - - case *LiteralExpr: - p.printf("%s", v.Token) - - case *StringExpr: - // If the Token is a correct quoting of Value, use it. - // This preserves the specific escaping choices that - // BUILD authors have made, and it also works around - // b/7272572. 
- if strings.HasPrefix(v.Token, `"`) { - s, triple, err := unquote(v.Token) - if s == v.Value && triple == v.TripleQuote && err == nil { - p.printf("%s", v.Token) - break - } - } - - p.printf("%s", quote(v.Value, v.TripleQuote)) - - case *DotExpr: - addParen(precSuffix) - p.expr(v.X, precSuffix) - p.printf(".%s", v.Name) - - case *IndexExpr: - addParen(precSuffix) - p.expr(v.X, precSuffix) - p.printf("[") - p.expr(v.Y, precLow) - p.printf("]") - - case *KeyValueExpr: - p.expr(v.Key, precLow) - p.printf(": ") - p.expr(v.Value, precLow) - - case *SliceExpr: - addParen(precSuffix) - p.expr(v.X, precSuffix) - p.printf("[") - if v.From != nil { - p.expr(v.From, precLow) - } - p.printf(":") - if v.To != nil { - p.expr(v.To, precLow) - } - if v.SecondColon.Byte != 0 { - p.printf(":") - if v.Step != nil { - p.expr(v.Step, precLow) - } - } - p.printf("]") - - case *UnaryExpr: - addParen(precUnary) - if v.Op == "not" { - p.printf("not ") // Requires a space after it. - } else { - p.printf("%s", v.Op) - } - p.expr(v.X, precUnary) - - case *LambdaExpr: - addParen(precColon) - p.printf("lambda ") - for i, name := range v.Var { - if i > 0 { - p.printf(", ") - } - p.expr(name, precLow) - } - p.printf(": ") - p.expr(v.Expr, precColon) - - case *BinaryExpr: - // Precedence: use the precedence of the operator. - // Since all binary expressions format left-to-right, - // it is okay for the left side to reuse the same operator - // without parentheses, so we use prec for v.X. - // For the same reason, the right side cannot reuse the same - // operator, or else a parse tree for a + (b + c), where the ( ) are - // not present in the source, will format as a + b + c, which - // means (a + b) + c. Treat the right expression as appearing - // in a context one precedence level higher: use prec+1 for v.Y. - // - // Line breaks: if we are to break the line immediately after - // the operator, introduce a margin at the current column, - // so that the second operand lines up with the first one and - // also so that neither operand can use space to the left. - // If the operator is an =, indent the right side another 4 spaces. 
- prec := opPrec[v.Op] - addParen(prec) - m := p.margin - if v.LineBreak { - p.margin = p.indent() - if v.Op == "=" { - p.margin += listIndentation - } - } - - p.expr(v.X, prec) - p.printf(" %s", v.Op) - if v.LineBreak { - p.breakline() - } else { - p.printf(" ") - } - p.expr(v.Y, prec+1) - p.margin = m - - case *ParenExpr: - p.seq("()", []Expr{v.X}, &v.End, modeParen, false, v.ForceMultiLine) - - case *CallExpr: - addParen(precSuffix) - p.expr(v.X, precSuffix) - p.seq("()", v.List, &v.End, modeCall, v.ForceCompact, v.ForceMultiLine) - - case *ListExpr: - p.seq("[]", v.List, &v.End, modeList, false, v.ForceMultiLine) - - case *SetExpr: - p.seq("{}", v.List, &v.End, modeList, false, v.ForceMultiLine) - - case *TupleExpr: - p.seq("()", v.List, &v.End, modeTuple, v.ForceCompact, v.ForceMultiLine) - - case *DictExpr: - var list []Expr - for _, x := range v.List { - list = append(list, x) - } - p.seq("{}", list, &v.End, modeDict, false, v.ForceMultiLine) - - case *ListForExpr: - p.listFor(v) - - case *ConditionalExpr: - addParen(precSuffix) - p.expr(v.Then, precSuffix) - p.printf(" if ") - p.expr(v.Test, precSuffix) - p.printf(" else ") - p.expr(v.Else, precSuffix) - - case *ReturnExpr: - p.printf("return") - if v.X != nil { - p.printf(" ") - p.expr(v.X, precSuffix) - } - - case *FuncDef: - p.printf("def ") - p.printf(v.Name) - p.seq("()", v.Args, &v.End, modeCall, v.ForceCompact, v.ForceMultiLine) - p.printf(":") - p.margin += nestedIndentation - p.newline() - p.statements(v.Body.Statements) - p.margin -= nestedIndentation - - case *ForLoop: - p.printf("for ") - for i, loopVar := range v.LoopVars { - if i > 0 { - p.printf(", ") - } - p.expr(loopVar, precLow) - } - p.printf(" in ") - p.expr(v.Iterable, precLow) - p.printf(":") - p.margin += nestedIndentation - p.newline() - p.statements(v.Body.Statements) - p.margin -= nestedIndentation - - case *IfElse: - for i, block := range v.Conditions { - if i == 0 { - p.printf("if ") - } else if block.If == nil { - p.newline() - p.printf("else") - } else { - p.newline() - p.printf("elif ") - } - - if block.If != nil { - p.expr(block.If, precLow) - } - p.printf(":") - p.margin += nestedIndentation - p.newline() - p.statements(block.Then.Statements) - p.margin -= nestedIndentation - } - } - - // Add closing parenthesis if needed. - if parenthesized { - p.depth-- - p.printf(")") - } - - // Queue end-of-line comments for printing when we - // reach the end of the line. - p.comment = append(p.comment, v.Comment().Suffix...) -} - -// A seqMode describes a formatting mode for a sequence of values, -// like a list or call arguments. -type seqMode int - -const ( - _ seqMode = iota - - modeCall // f(x) - modeList // [x] - modeTuple // (x,) - modeParen // (x) - modeDict // {x:y} - modeSeq // x, y -) - -// seq formats a list of values inside a given bracket pair (brack = "()", "[]", "{}"). -// The end node holds any trailing comments to be printed just before the -// closing bracket. -// The mode parameter specifies the sequence mode (see above). -// If multiLine is true, seq avoids the compact form even -// for 0- and 1-element sequences. -func (p *printer) seq(brack string, list []Expr, end *End, mode seqMode, forceCompact, forceMultiLine bool) { - p.printf("%s", brack[:1]) - p.depth++ - - // If there are line comments, force multiline - // so we can print the comments before the closing bracket. 
- for _, x := range list { - if len(x.Comment().Before) > 0 { - forceMultiLine = true - } - } - if len(end.Before) > 0 { - forceMultiLine = true - } - - // Resolve possibly ambiguous call arguments explicitly - // instead of depending on implicit resolution in logic below. - if forceMultiLine { - forceCompact = false - } - - switch { - case len(list) == 0 && !forceMultiLine: - // Compact form: print nothing. - - case len(list) == 1 && !forceMultiLine: - // Compact form. - p.expr(list[0], precLow) - // Tuple must end with comma, to mark it as a tuple. - if mode == modeTuple { - p.printf(",") - } - - case forceCompact: - // Compact form but multiple elements. - for i, x := range list { - if i > 0 { - p.printf(", ") - } - p.expr(x, precLow) - } - - default: - // Multi-line form. - p.margin += listIndentation - for i, x := range list { - // If we are about to break the line before the first - // element and there are trailing end-of-line comments - // waiting to be printed, delay them and print them as - // whole-line comments preceding that element. - // Do this by printing a newline ourselves and positioning - // so that the end-of-line comment, with the two spaces added, - // will line up with the current margin. - if i == 0 && len(p.comment) > 0 { - p.printf("\n%*s", p.margin-2, "") - } - - p.newline() - p.expr(x, precLow) - if mode != modeParen || i+1 < len(list) { - p.printf(",") - } - } - // Final comments. - for _, com := range end.Before { - p.newline() - p.printf("%s", strings.TrimSpace(com.Token)) - } - p.margin -= listIndentation - p.newline() - } - p.depth-- - p.printf("%s", brack[1:]) -} - -// listFor formats a ListForExpr (list comprehension). -// The single-line form is: -// [x for y in z if c] -// -// and the multi-line form is: -// [ -// x -// for y in z -// if c -// ] -// -func (p *printer) listFor(v *ListForExpr) { - multiLine := v.ForceMultiLine || len(v.End.Before) > 0 - - // space breaks the line in multiline mode - // or else prints a space. - space := func() { - if multiLine { - p.breakline() - } else { - p.printf(" ") - } - } - - if v.Brack != "" { - p.depth++ - p.printf("%s", v.Brack[:1]) - } - - if multiLine { - if v.Brack != "" { - p.margin += listIndentation - } - p.newline() - } - - p.expr(v.X, precLow) - - for _, c := range v.For { - space() - p.printf("for ") - for i, name := range c.For.Var { - if i > 0 { - p.printf(", ") - } - p.expr(name, precLow) - } - p.printf(" in ") - p.expr(c.For.Expr, precLow) - p.comment = append(p.comment, c.For.Comment().Suffix...) - - for _, i := range c.Ifs { - space() - p.printf("if ") - p.expr(i.Cond, precLow) - p.comment = append(p.comment, i.Comment().Suffix...) - } - p.comment = append(p.comment, c.Comment().Suffix...) - - } - - if multiLine { - for _, com := range v.End.Before { - p.newline() - p.printf("%s", strings.TrimSpace(com.Token)) - } - if v.Brack != "" { - p.margin -= listIndentation - } - p.newline() - } - - if v.Brack != "" { - p.printf("%s", v.Brack[1:]) - p.depth-- - } -} - -func (p *printer) isTopLevel() bool { - return p.margin == 0 -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/quote.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/quote.go deleted file mode 100644 index d5ffe8d452..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/quote.go +++ /dev/null @@ -1,262 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -// Python quoted strings. - -package build - -import ( - "bytes" - "fmt" - "strconv" - "strings" -) - -// unesc maps single-letter chars following \ to their actual values. -var unesc = [256]byte{ - 'a': '\a', - 'b': '\b', - 'f': '\f', - 'n': '\n', - 'r': '\r', - 't': '\t', - 'v': '\v', - '\\': '\\', - '\'': '\'', - '"': '"', -} - -// esc maps escape-worthy bytes to the char that should follow \. -var esc = [256]byte{ - '\a': 'a', - '\b': 'b', - '\f': 'f', - '\n': 'n', - '\r': 'r', - '\t': 't', - '\v': 'v', - '\\': '\\', - '\'': '\'', - '"': '"', -} - -// notEsc is a list of characters that can follow a \ in a string value -// without having to escape the \. That is, since ( is in this list, we -// quote the Go string "foo\\(bar" as the Python literal "foo\(bar". -// This really does happen in BUILD files, especially in strings -// being used as shell arguments containing regular expressions. -const notEsc = " !#$%&()*+,-./:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ{|}~" - -// unquote unquotes the quoted string, returning the actual -// string value, whether the original was triple-quoted, and -// an error describing invalid input. -func unquote(quoted string) (s string, triple bool, err error) { - // Check for raw prefix: means don't interpret the inner \. - raw := false - if strings.HasPrefix(quoted, "r") { - raw = true - quoted = quoted[1:] - } - - if len(quoted) < 2 { - err = fmt.Errorf("string literal too short") - return - } - - if quoted[0] != '"' && quoted[0] != '\'' || quoted[0] != quoted[len(quoted)-1] { - err = fmt.Errorf("string literal has invalid quotes") - } - - // Check for triple quoted string. - quote := quoted[0] - if len(quoted) >= 6 && quoted[1] == quote && quoted[2] == quote && quoted[:3] == quoted[len(quoted)-3:] { - triple = true - quoted = quoted[3 : len(quoted)-3] - } else { - quoted = quoted[1 : len(quoted)-1] - } - - // Now quoted is the quoted data, but no quotes. - // If we're in raw mode or there are no escapes, we're done. - if raw || !strings.Contains(quoted, `\`) { - s = quoted - return - } - - // Otherwise process quoted string. - // Each iteration processes one escape sequence along with the - // plain text leading up to it. - var buf bytes.Buffer - for { - // Remove prefix before escape sequence. - i := strings.Index(quoted, `\`) - if i < 0 { - i = len(quoted) - } - buf.WriteString(quoted[:i]) - quoted = quoted[i:] - - if len(quoted) == 0 { - break - } - - // Process escape sequence. - if len(quoted) == 1 { - err = fmt.Errorf(`truncated escape sequence \`) - return - } - - switch quoted[1] { - default: - // In Python, if \z (for some byte z) is not a known escape sequence - // then it appears as literal text in the string. - buf.WriteString(quoted[:2]) - quoted = quoted[2:] - - case '\n': - // Ignore the escape and the line break. 
- quoted = quoted[2:] - - case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '\'', '"': - // One-char escape - buf.WriteByte(unesc[quoted[1]]) - quoted = quoted[2:] - - case '0', '1', '2', '3', '4', '5', '6', '7': - // Octal escape, up to 3 digits. - n := int(quoted[1] - '0') - quoted = quoted[2:] - for i := 1; i < 3; i++ { - if len(quoted) == 0 || quoted[0] < '0' || '7' < quoted[0] { - break - } - n = n*8 + int(quoted[0]-'0') - quoted = quoted[1:] - } - if n >= 256 { - // NOTE: Python silently discards the high bit, - // so that '\541' == '\141' == 'a'. - // Let's see if we can avoid doing that in BUILD files. - err = fmt.Errorf(`invalid escape sequence \%03o`, n) - return - } - buf.WriteByte(byte(n)) - - case 'x': - // Hexadecimal escape, exactly 2 digits. - if len(quoted) < 4 { - err = fmt.Errorf(`truncated escape sequence %s`, quoted) - return - } - n, err1 := strconv.ParseInt(quoted[2:4], 16, 0) - if err1 != nil { - err = fmt.Errorf(`invalid escape sequence %s`, quoted[:4]) - return - } - buf.WriteByte(byte(n)) - quoted = quoted[4:] - } - } - - s = buf.String() - return -} - -// indexByte returns the index of the first instance of b in s, or else -1. -func indexByte(s string, b byte) int { - for i := 0; i < len(s); i++ { - if s[i] == b { - return i - } - } - return -1 -} - -// hex is a list of the hexadecimal digits, for use in quoting. -// We always print lower-case hexadecimal. -const hex = "0123456789abcdef" - -// quote returns the quoted form of the string value "x". -// If triple is true, quote uses the triple-quoted form """x""". -func quote(unquoted string, triple bool) string { - q := `"` - if triple { - q = `"""` - } - - var buf bytes.Buffer - buf.WriteString(q) - - for i := 0; i < len(unquoted); i++ { - c := unquoted[i] - if c == '"' && triple && (i+1 < len(unquoted) && unquoted[i+1] != '"' || i+2 < len(unquoted) && unquoted[i+2] != '"') { - // Can pass up to two quotes through, because they are followed by a non-quote byte. - buf.WriteByte(c) - if i+1 < len(unquoted) && unquoted[i+1] == '"' { - buf.WriteByte(c) - i++ - } - continue - } - if triple && c == '\n' { - // Can allow newline in triple-quoted string. - buf.WriteByte(c) - continue - } - if c == '\'' { - // Can allow ' since we always use ". - buf.WriteByte(c) - continue - } - if c == '\\' { - if i+1 < len(unquoted) && indexByte(notEsc, unquoted[i+1]) >= 0 { - // Can pass \ through when followed by a byte that - // known not to be a valid escape sequence and also - // that does not trigger an escape sequence of its own. - // Use this, because various BUILD files do. - buf.WriteByte('\\') - buf.WriteByte(unquoted[i+1]) - i++ - continue - } - } - if esc[c] != 0 { - buf.WriteByte('\\') - buf.WriteByte(esc[c]) - continue - } - if c < 0x20 || c >= 0x80 { - // BUILD files are supposed to be Latin-1, so escape all control and high bytes. - // I'd prefer to use \x here, but Blaze does not implement - // \x in quoted strings (b/7272572). - buf.WriteByte('\\') - buf.WriteByte(hex[c>>6]) // actually octal but reusing hex digits 0-7. 
- buf.WriteByte(hex[(c>>3)&7]) - buf.WriteByte(hex[c&7]) - /* - buf.WriteByte('\\') - buf.WriteByte('x') - buf.WriteByte(hex[c>>4]) - buf.WriteByte(hex[c&0xF]) - */ - continue - } - buf.WriteByte(c) - continue - } - - buf.WriteString(q) - return buf.String() -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rewrite.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rewrite.go deleted file mode 100644 index b7f331a3e0..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rewrite.go +++ /dev/null @@ -1,817 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -// Rewriting of high-level (not purely syntactic) BUILD constructs. - -package build - -import ( - "path" - "regexp" - "sort" - "strings" - - "github.com/bazelbuild/buildtools/tables" -) - -// For debugging: flag to disable certain rewrites. -var DisableRewrites []string - -// disabled reports whether the named rewrite is disabled. -func disabled(name string) bool { - for _, x := range DisableRewrites { - if name == x { - return true - } - } - return false -} - -// For debugging: allow sorting of these lists even with sorting otherwise disabled. -var AllowSort []string - -// allowedSort reports whether sorting is allowed in the named context. -func allowedSort(name string) bool { - for _, x := range AllowSort { - if name == x { - return true - } - } - return false -} - -// Rewrite applies the high-level Buildifier rewrites to f, modifying it in place. -// If info is non-nil, Rewrite updates it with information about the rewrite. -func Rewrite(f *File, info *RewriteInfo) { - // Allocate an info so that helpers can assume it's there. - if info == nil { - info = new(RewriteInfo) - } - - for _, r := range rewrites { - if !disabled(r.name) { - r.fn(f, info) - } - } -} - -// RewriteInfo collects information about what Rewrite did. -type RewriteInfo struct { - EditLabel int // number of label strings edited - NameCall int // number of calls with argument names added - SortCall int // number of call argument lists sorted - SortStringList int // number of string lists sorted - UnsafeSort int // number of unsafe string lists sorted - Log []string // log entries - may change -} - -func (info *RewriteInfo) String() string { - s := "" - if info.EditLabel > 0 { - s += " label" - } - if info.NameCall > 0 { - s += " callname" - } - if info.SortCall > 0 { - s += " callsort" - } - if info.SortStringList > 0 { - s += " listsort" - } - if info.UnsafeSort > 0 { - s += " unsafesort" - } - if s != "" { - s = s[1:] - } - return s -} - -// rewrites is the list of all Buildifier rewrites, in the order in which they are applied. -// The order here matters: for example, label canonicalization must happen -// before sorting lists of strings. 
-var rewrites = []struct { - name string - fn func(*File, *RewriteInfo) -}{ - {"callsort", sortCallArgs}, - {"label", fixLabels}, - {"listsort", sortStringLists}, - {"multiplus", fixMultilinePlus}, -} - -// leaveAlone reports whether any of the nodes on the stack are marked -// with a comment containing "buildifier: leave-alone". -func leaveAlone(stk []Expr, final Expr) bool { - for _, x := range stk { - if leaveAlone1(x) { - return true - } - } - if final != nil && leaveAlone1(final) { - return true - } - return false -} - -// hasComment reports whether x is marked with a comment that -// after being converted to lower case, contains the specified text. -func hasComment(x Expr, text string) bool { - for _, com := range x.Comment().Before { - if strings.Contains(strings.ToLower(com.Token), text) { - return true - } - } - return false -} - -// leaveAlone1 reports whether x is marked with a comment containing -// "buildifier: leave-alone", case-insensitive. -func leaveAlone1(x Expr) bool { - return hasComment(x, "buildifier: leave-alone") -} - -// doNotSort reports whether x is marked with a comment containing -// "do not sort", case-insensitive. -func doNotSort(x Expr) bool { - return hasComment(x, "do not sort") -} - -// keepSorted reports whether x is marked with a comment containing -// "keep sorted", case-insensitive. -func keepSorted(x Expr) bool { - return hasComment(x, "keep sorted") -} - -// fixLabels rewrites labels into a canonical form. -// -// First, it joins labels written as string addition, turning -// "//x" + ":y" (usually split across multiple lines) into "//x:y". -// -// Second, it removes redundant target qualifiers, turning labels like -// "//third_party/m4:m4" into "//third_party/m4" as well as ones like -// "@foo//:foo" into "@foo". -// -func fixLabels(f *File, info *RewriteInfo) { - joinLabel := func(p *Expr) { - add, ok := (*p).(*BinaryExpr) - if !ok || add.Op != "+" { - return - } - str1, ok := add.X.(*StringExpr) - if !ok || !strings.HasPrefix(str1.Value, "//") || strings.Contains(str1.Value, " ") { - return - } - str2, ok := add.Y.(*StringExpr) - if !ok || strings.Contains(str2.Value, " ") { - return - } - info.EditLabel++ - str1.Value += str2.Value - - // Deleting nodes add and str2. - // Merge comments from add, str1, and str2 and save in str1. - com1 := add.Comment() - com2 := str1.Comment() - com3 := str2.Comment() - com1.Before = append(com1.Before, com2.Before...) - com1.Before = append(com1.Before, com3.Before...) - com1.Suffix = append(com1.Suffix, com2.Suffix...) - com1.Suffix = append(com1.Suffix, com3.Suffix...) - *str1.Comment() = *com1 - - *p = str1 - } - - labelPrefix := "//" - if tables.StripLabelLeadingSlashes { - labelPrefix = "" - } - // labelRE matches label strings, e.g. @r//x/y/z:abc - // where $1 is @r//x/y/z, $2 is @r//, $3 is r, $4 is z, $5 is abc. - labelRE := regexp.MustCompile(`^(((?:@(\w+))?//|` + labelPrefix + `)(?:.+/)?([^:]*))(?::([^:]+))?$`) - - shortenLabel := func(v Expr) { - str, ok := v.(*StringExpr) - if !ok { - return - } - editPerformed := false - - if tables.StripLabelLeadingSlashes && strings.HasPrefix(str.Value, "//") { - if path.Dir(f.Path) == "." 
|| !strings.HasPrefix(str.Value, "//:") { - editPerformed = true - str.Value = str.Value[2:] - } - } - - if tables.ShortenAbsoluteLabelsToRelative { - thisPackage := labelPrefix + path.Dir(f.Path) - if str.Value == thisPackage { - editPerformed = true - str.Value = ":" + path.Base(str.Value) - } else if strings.HasPrefix(str.Value, thisPackage+":") { - editPerformed = true - str.Value = str.Value[len(thisPackage):] - } - } - - m := labelRE.FindStringSubmatch(str.Value) - if m == nil { - return - } - if m[4] != "" && m[4] == m[5] { // e.g. //foo:foo - editPerformed = true - str.Value = m[1] - } else if m[3] != "" && m[4] == "" && m[3] == m[5] { // e.g. @foo//:foo - editPerformed = true - str.Value = "@" + m[3] - } - if editPerformed { - info.EditLabel++ - } - } - - Walk(f, func(v Expr, stk []Expr) { - switch v := v.(type) { - case *CallExpr: - if leaveAlone(stk, v) { - return - } - for i := range v.List { - if leaveAlone1(v.List[i]) { - continue - } - as, ok := v.List[i].(*BinaryExpr) - if !ok || as.Op != "=" { - continue - } - key, ok := as.X.(*LiteralExpr) - if !ok || !tables.IsLabelArg[key.Token] || tables.LabelBlacklist[callName(v)+"."+key.Token] { - continue - } - if leaveAlone1(as.Y) { - continue - } - if list, ok := as.Y.(*ListExpr); ok { - for i := range list.List { - if leaveAlone1(list.List[i]) { - continue - } - joinLabel(&list.List[i]) - shortenLabel(list.List[i]) - } - } - if set, ok := as.Y.(*SetExpr); ok { - for i := range set.List { - if leaveAlone1(set.List[i]) { - continue - } - joinLabel(&set.List[i]) - shortenLabel(set.List[i]) - } - } else { - joinLabel(&as.Y) - shortenLabel(as.Y) - } - } - } - }) -} - -// callName returns the name of the rule being called by call. -// If the call is not to a literal rule name, callName returns "". -func callName(call *CallExpr) string { - rule, ok := call.X.(*LiteralExpr) - if !ok { - return "" - } - return rule.Token -} - -// sortCallArgs sorts lists of named arguments to a call. -func sortCallArgs(f *File, info *RewriteInfo) { - Walk(f, func(v Expr, stk []Expr) { - call, ok := v.(*CallExpr) - if !ok { - return - } - if leaveAlone(stk, call) { - return - } - rule := callName(call) - if rule == "" { - return - } - - // Find the tail of the argument list with named arguments. - start := len(call.List) - for start > 0 && argName(call.List[start-1]) != "" { - start-- - } - - // Record information about each arg into a sortable list. - var args namedArgs - for i, x := range call.List[start:] { - name := argName(x) - args = append(args, namedArg{ruleNamePriority(rule, name), name, i, x}) - } - - // Sort the list and put the args back in the new order. - if sort.IsSorted(args) { - return - } - info.SortCall++ - sort.Sort(args) - for i, x := range args { - call.List[start+i] = x.expr - } - }) -} - -// ruleNamePriority maps a rule argument name to its sorting priority. -// It could use the auto-generated per-rule tables but for now it just -// falls back to the original list. -func ruleNamePriority(rule, arg string) int { - ruleArg := rule + "." + arg - if val, ok := tables.NamePriority[ruleArg]; ok { - return val - } - return tables.NamePriority[arg] - /* - list := ruleArgOrder[rule] - if len(list) == 0 { - return tables.NamePriority[arg] - } - for i, x := range list { - if x == arg { - return i - } - } - return len(list) - */ -} - -// If x is of the form key=value, argName returns the string key. -// Otherwise argName returns "". 
-func argName(x Expr) string { - if as, ok := x.(*BinaryExpr); ok && as.Op == "=" { - if id, ok := as.X.(*LiteralExpr); ok { - return id.Token - } - } - return "" -} - -// A namedArg records information needed for sorting -// a named call argument into its proper position. -type namedArg struct { - priority int // kind of name; first sort key - name string // name; second sort key - index int // original index; final sort key - expr Expr // name=value argument -} - -// namedArgs is a slice of namedArg that implements sort.Interface -type namedArgs []namedArg - -func (x namedArgs) Len() int { return len(x) } -func (x namedArgs) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x namedArgs) Less(i, j int) bool { - p := x[i] - q := x[j] - if p.priority != q.priority { - return p.priority < q.priority - } - if p.name != q.name { - return p.name < q.name - } - return p.index < q.index -} - -// sortStringLists sorts lists of string literals used as specific rule arguments. -func sortStringLists(f *File, info *RewriteInfo) { - Walk(f, func(v Expr, stk []Expr) { - switch v := v.(type) { - case *CallExpr: - if leaveAlone(stk, v) { - return - } - rule := callName(v) - for _, arg := range v.List { - if leaveAlone1(arg) { - continue - } - as, ok := arg.(*BinaryExpr) - if !ok || as.Op != "=" || leaveAlone1(as) || doNotSort(as) { - continue - } - key, ok := as.X.(*LiteralExpr) - if !ok { - continue - } - context := rule + "." + key.Token - if !tables.IsSortableListArg[key.Token] || tables.SortableBlacklist[context] { - continue - } - if disabled("unsafesort") && !tables.SortableWhitelist[context] && !allowedSort(context) { - continue - } - sortStringList(as.Y, info, context) - } - case *BinaryExpr: - if disabled("unsafesort") { - return - } - // "keep sorted" comment on x = list forces sorting of list. - as := v - if as.Op == "=" && keepSorted(as) { - sortStringList(as.Y, info, "?") - } - case *KeyValueExpr: - if disabled("unsafesort") { - return - } - // "keep sorted" before key: list also forces sorting of list. - if keepSorted(v) { - sortStringList(v.Value, info, "?") - } - case *ListExpr: - if disabled("unsafesort") { - return - } - // "keep sorted" comment above first list element also forces sorting of list. - if len(v.List) > 0 && keepSorted(v.List[0]) { - sortStringList(v, info, "?") - } - } - }) -} - -// SortStringList sorts x, a list of strings. -func SortStringList(x Expr) { - sortStringList(x, nil, "") -} - -// sortStringList sorts x, a list of strings. -// The list is broken by non-strings and by blank lines and comments into chunks. -// Each chunk is sorted in place. -func sortStringList(x Expr, info *RewriteInfo, context string) { - list, ok := x.(*ListExpr) - if !ok || len(list.List) < 2 || doNotSort(list.List[0]) { - return - } - - forceSort := keepSorted(list.List[0]) - - // TODO(bazel-team): Decide how to recognize lists that cannot - // be sorted. Avoiding all lists with comments avoids sorting - // lists that say explicitly, in some form or another, why they - // cannot be sorted. For example, many cc_test rules require - // certain order in their deps attributes. - if !forceSort { - if line, _ := hasComments(list); line { - return - } - } - - // Sort chunks of the list with no intervening blank lines or comments. 
- for i := 0; i < len(list.List); { - if _, ok := list.List[i].(*StringExpr); !ok { - i++ - continue - } - - j := i + 1 - for ; j < len(list.List); j++ { - if str, ok := list.List[j].(*StringExpr); !ok || len(str.Before) > 0 { - break - } - } - - var chunk []stringSortKey - for index, x := range list.List[i:j] { - chunk = append(chunk, makeSortKey(index, x.(*StringExpr))) - } - if !sort.IsSorted(byStringExpr(chunk)) || !isUniq(chunk) { - if info != nil { - info.SortStringList++ - if !tables.SortableWhitelist[context] { - info.UnsafeSort++ - info.Log = append(info.Log, "sort:"+context) - } - } - before := chunk[0].x.Comment().Before - chunk[0].x.Comment().Before = nil - - sort.Sort(byStringExpr(chunk)) - chunk = uniq(chunk) - - chunk[0].x.Comment().Before = before - for offset, key := range chunk { - list.List[i+offset] = key.x - } - list.List = append(list.List[:(i+len(chunk))], list.List[j:]...) - } - - i = j - } -} - -// uniq removes duplicates from a list, which must already be sorted. -// It edits the list in place. -func uniq(sortedList []stringSortKey) []stringSortKey { - out := sortedList[:0] - for _, sk := range sortedList { - if len(out) == 0 || sk.value != out[len(out)-1].value { - out = append(out, sk) - } - } - return out -} - -// isUniq reports whether the sorted list only contains unique elements. -func isUniq(list []stringSortKey) bool { - for i := range list { - if i+1 < len(list) && list[i].value == list[i+1].value { - return false - } - } - return true -} - -// If stk describes a call argument like rule(arg=...), callArgName -// returns the name of that argument, formatted as "rule.arg". -func callArgName(stk []Expr) string { - n := len(stk) - if n < 2 { - return "" - } - arg := argName(stk[n-1]) - if arg == "" { - return "" - } - call, ok := stk[n-2].(*CallExpr) - if !ok { - return "" - } - rule, ok := call.X.(*LiteralExpr) - if !ok { - return "" - } - return rule.Token + "." + arg -} - -// A stringSortKey records information about a single string literal to be -// sorted. The strings are first grouped into four phases: most strings, -// strings beginning with ":", strings beginning with "//", and strings -// beginning with "@". The next significant part of the comparison is the list -// of elements in the value, where elements are split at `.' and `:'. Finally -// we compare by value and break ties by original index. -type stringSortKey struct { - phase int - split []string - value string - original int - x Expr -} - -func makeSortKey(index int, x *StringExpr) stringSortKey { - key := stringSortKey{ - value: x.Value, - original: index, - x: x, - } - - switch { - case strings.HasPrefix(x.Value, ":"): - key.phase = 1 - case strings.HasPrefix(x.Value, "//") || (tables.StripLabelLeadingSlashes && !strings.HasPrefix(x.Value, "@")): - key.phase = 2 - case strings.HasPrefix(x.Value, "@"): - key.phase = 3 - } - - key.split = strings.Split(strings.Replace(x.Value, ":", ".", -1), ".") - return key -} - -// byStringExpr implements sort.Interface for a list of stringSortKey. 
-type byStringExpr []stringSortKey - -func (x byStringExpr) Len() int { return len(x) } -func (x byStringExpr) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byStringExpr) Less(i, j int) bool { - xi := x[i] - xj := x[j] - - if xi.phase != xj.phase { - return xi.phase < xj.phase - } - for k := 0; k < len(xi.split) && k < len(xj.split); k++ { - if xi.split[k] != xj.split[k] { - return xi.split[k] < xj.split[k] - } - } - if len(xi.split) != len(xj.split) { - return len(xi.split) < len(xj.split) - } - if xi.value != xj.value { - return xi.value < xj.value - } - return xi.original < xj.original -} - -// fixMultilinePlus turns -// -// ... + -// [ ... ] -// -// ... + -// call(...) -// -// into -// ... + [ -// ... -// ] -// -// ... + call( -// ... -// ) -// -// which typically works better with our aggressively compact formatting. -func fixMultilinePlus(f *File, info *RewriteInfo) { - - // List manipulation helpers. - // As a special case, we treat f([...]) as a list, mainly - // for glob. - - // isList reports whether x is a list. - var isList func(x Expr) bool - isList = func(x Expr) bool { - switch x := x.(type) { - case *ListExpr: - return true - case *CallExpr: - if len(x.List) == 1 { - return isList(x.List[0]) - } - } - return false - } - - // isMultiLine reports whether x is a multiline list. - var isMultiLine func(Expr) bool - isMultiLine = func(x Expr) bool { - switch x := x.(type) { - case *ListExpr: - return x.ForceMultiLine || len(x.List) > 1 - case *CallExpr: - if x.ForceMultiLine || len(x.List) > 1 && !x.ForceCompact { - return true - } - if len(x.List) == 1 { - return isMultiLine(x.List[0]) - } - } - return false - } - - // forceMultiLine tries to force the list x to use a multiline form. - // It reports whether it was successful. - var forceMultiLine func(Expr) bool - forceMultiLine = func(x Expr) bool { - switch x := x.(type) { - case *ListExpr: - // Already multi line? - if x.ForceMultiLine { - return true - } - // If this is a list containing a list, force the - // inner list to be multiline instead. - if len(x.List) == 1 && forceMultiLine(x.List[0]) { - return true - } - x.ForceMultiLine = true - return true - - case *CallExpr: - if len(x.List) == 1 { - return forceMultiLine(x.List[0]) - } - } - return false - } - - skip := map[Expr]bool{} - Walk(f, func(v Expr, stk []Expr) { - if skip[v] { - return - } - bin, ok := v.(*BinaryExpr) - if !ok || bin.Op != "+" { - return - } - - // Found a +. - // w + x + y + z parses as ((w + x) + y) + z, - // so chase down the left side to make a list of - // all the things being added together, separated - // by the BinaryExprs that join them. - // Mark them as "skip" so that when Walk recurses - // into the subexpressions, we won't reprocess them. - var all []Expr - for { - all = append(all, bin.Y, bin) - bin1, ok := bin.X.(*BinaryExpr) - if !ok || bin1.Op != "+" { - break - } - bin = bin1 - skip[bin] = true - } - all = append(all, bin.X) - - // Because the outermost expression was the - // rightmost one, the list is backward. Reverse it. - for i, j := 0, len(all)-1; i < j; i, j = i+1, j-1 { - all[i], all[j] = all[j], all[i] - } - - // The 'all' slice is alternating addends and BinaryExpr +'s: - // w, +, x, +, y, +, z - // If there are no lists involved, don't rewrite anything. - haveList := false - for i := 0; i < len(all); i += 2 { - if isList(all[i]) { - haveList = true - break - } - } - if !haveList { - return - } - - // Okay, there are lists. - // Consider each + next to a line break. 
- for i := 1; i < len(all); i += 2 { - bin := all[i].(*BinaryExpr) - if !bin.LineBreak { - continue - } - - // We're going to break the line after the +. - // If it is followed by a list, force that to be - // multiline instead. - if forceMultiLine(all[i+1]) { - bin.LineBreak = false - continue - } - - // If the previous list was multiline already, - // don't bother with the line break after - // the +. - if isMultiLine(all[i-1]) { - bin.LineBreak = false - continue - } - } - }) -} - -// hasComments reports whether any comments are associated with -// the list or its elements. -func hasComments(list *ListExpr) (line, suffix bool) { - com := list.Comment() - if len(com.Before) > 0 || len(com.After) > 0 || len(list.End.Before) > 0 { - line = true - } - if len(com.Suffix) > 0 { - suffix = true - } - for _, elem := range list.List { - com := elem.Comment() - if len(com.Before) > 0 { - line = true - } - if len(com.Suffix) > 0 { - suffix = true - } - } - return -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rule.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rule.go deleted file mode 100644 index 5f4b99c37d..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/rule.go +++ /dev/null @@ -1,315 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -// Rule-level API for inspecting and modifying a build.File syntax tree. - -package build - -import ( - "strings" - "path/filepath" -) - -// A Rule represents a single BUILD rule. -type Rule struct { - Call *CallExpr - ImplicitName string // The name which should be used if the name attribute is not set. See the comment on File.implicitRuleName. -} - -func (f *File) Rule(call *CallExpr) *Rule { - r := &Rule{call, ""} - if r.AttrString("name") == "" { - r.ImplicitName = f.implicitRuleName() - } - return r -} - -// Rules returns the rules in the file of the given kind (such as "go_library"). -// If kind == "", Rules returns all rules in the file. -func (f *File) Rules(kind string) []*Rule { - var all []*Rule - - for _, stmt := range f.Stmt { - call, ok := stmt.(*CallExpr) - if !ok { - continue - } - rule := f.Rule(call) - if kind != "" && rule.Kind() != kind { - continue - } - all = append(all, rule) - } - - return all -} - -// RuleAt returns the rule in the file that starts at the specified line, or null if no such rule. -func (f *File) RuleAt(linenum int) *Rule { - - for _, stmt := range f.Stmt { - call, ok := stmt.(*CallExpr) - if !ok { - continue - } - start, end := call.X.Span() - if start.Line <= linenum && linenum <= end.Line { - return f.Rule(call) - } - } - return nil -} - -// DelRules removes rules with the given kind and name from the file. -// An empty kind matches all kinds; an empty name matches all names. -// It returns the number of rules that were deleted. 
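The Rule helpers in this hunk (File.Rules, Rule.Name, and friends) are normally driven from a parsed file. A minimal sketch, assuming build.Parse, which lives in the parser files of the same package rather than in the files removed here:

```go
package main

import (
	"fmt"
	"io/ioutil"

	"github.com/bazelbuild/buildtools/build"
)

func main() {
	data, err := ioutil.ReadFile("BUILD")
	if err != nil {
		panic(err)
	}
	// build.Parse is assumed; it is defined outside this rule.go hunk.
	f, err := build.Parse("BUILD", data)
	if err != nil {
		panic(err)
	}
	// List every go_library rule with its srcs attribute.
	for _, r := range f.Rules("go_library") {
		fmt.Println(r.Name(), r.AttrStrings("srcs"))
	}
}
```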
-func (f *File) DelRules(kind, name string) int { - var i int - for _, stmt := range f.Stmt { - if call, ok := stmt.(*CallExpr); ok { - r := f.Rule(call) - if (kind == "" || r.Kind() == kind) && - (name == "" || r.Name() == name) { - continue - } - } - f.Stmt[i] = stmt - i++ - } - n := len(f.Stmt) - i - f.Stmt = f.Stmt[:i] - return n -} - -// If a build file contains exactly one unnamed rule, and no rules in the file explicitly have the -// same name as the name of the directory the build file is in, we treat the unnamed rule as if it -// had the name of the directory containing the BUILD file. -// This is following a convention used in the Pants build system to cut down on boilerplate. -func (f *File) implicitRuleName() string { - // We disallow empty names in the top-level BUILD files. - dir := filepath.Dir(f.Path) - if dir == "." { - return "" - } - sawAnonymousRule := false - possibleImplicitName := filepath.Base(dir) - - for _, stmt := range f.Stmt { - call, ok := stmt.(*CallExpr) - if !ok { - continue - } - temp := &Rule{call, ""} - if temp.AttrString("name") == possibleImplicitName { - // A target explicitly has the name of the dir, so no implicit targets are allowed. - return "" - } - if temp.Kind() != "" && temp.AttrString("name") == "" { - if sawAnonymousRule { - return "" - } - sawAnonymousRule = true - } - } - if sawAnonymousRule { - return possibleImplicitName - } - return "" -} - -// Kind returns the rule's kind (such as "go_library"). -// The kind of the rule may be given by a literal or it may be a sequence of dot expressions that -// begins with a literal, if the call expression does not conform to either of these forms, an -// empty string will be returned -func (r *Rule) Kind() string { - var names []string - expr := r.Call.X - for { - x, ok := expr.(*DotExpr) - if !ok { - break - } - names = append(names, x.Name) - expr = x.X - } - x, ok := expr.(*LiteralExpr) - if !ok { - return "" - } - names = append(names, x.Token) - // Reverse the elements since the deepest expression contains the leading literal - for l, r := 0, len(names)-1; l < r; l, r = l+1, r-1 { - names[l], names[r] = names[r], names[l] - } - return strings.Join(names, ".") -} - -// SetKind changes rule's kind (such as "go_library"). -func (r *Rule) SetKind(kind string) { - names := strings.Split(kind, ".") - var expr Expr - expr = &LiteralExpr{Token: names[0]} - for _, name := range names[1:] { - expr = &DotExpr{X: expr, Name: name} - } - r.Call.X = expr -} - -// Name returns the rule's target name. -// If the rule has no explicit target name, Name returns the implicit name if there is one, else the empty string. -func (r *Rule) Name() string { - explicitName := r.AttrString("name") - if explicitName == "" { - return r.ImplicitName - } - return explicitName -} - -// AttrKeys returns the keys of all the rule's attributes. -func (r *Rule) AttrKeys() []string { - var keys []string - for _, expr := range r.Call.List { - if binExpr, ok := expr.(*BinaryExpr); ok && binExpr.Op == "=" { - if keyExpr, ok := binExpr.X.(*LiteralExpr); ok { - keys = append(keys, keyExpr.Token) - } - } - } - return keys -} - -// AttrDefn returns the BinaryExpr defining the rule's attribute with the given key. -// That is, the result is a *BinaryExpr with Op == "=". -// If the rule has no such attribute, AttrDefn returns nil. 
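Attributes are edited through the same Rule API; an attribute value is any Expr from the syntax package, for example a StringExpr. A hedged sketch (build.Parse and build.Format are assumed entry points defined outside this hunk, and the attribute names are only illustrative):

```go
package rules

import (
	"io/ioutil"

	"github.com/bazelbuild/buildtools/build"
)

// setTestTimeouts tweaks every go_test rule: SetAttr replaces or appends a
// "timeout" attribute, and DelAttr drops "flaky" if present.
func setTestTimeouts(f *build.File) {
	for _, r := range f.Rules("go_test") {
		r.SetAttr("timeout", &build.StringExpr{Value: "short"})
		r.DelAttr("flaky")
	}
}

// rewriteFile parses a BUILD file, applies the edit, and writes it back.
func rewriteFile(path string) error {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return err
	}
	f, err := build.Parse(path, data) // assumed parser entry point
	if err != nil {
		return err
	}
	setTestTimeouts(f)
	// build.Format is assumed; it pretty-prints the syntax tree.
	return ioutil.WriteFile(path, build.Format(f), 0644)
}
```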
-func (r *Rule) AttrDefn(key string) *BinaryExpr { - for _, kv := range r.Call.List { - as, ok := kv.(*BinaryExpr) - if !ok || as.Op != "=" { - continue - } - k, ok := as.X.(*LiteralExpr) - if !ok || k.Token != key { - continue - } - return as - } - return nil -} - -// Attr returns the value of the rule's attribute with the given key -// (such as "name" or "deps"). -// If the rule has no such attribute, Attr returns nil. -func (r *Rule) Attr(key string) Expr { - as := r.AttrDefn(key) - if as == nil { - return nil - } - return as.Y -} - -// DelAttr deletes the rule's attribute with the named key. -// It returns the old value of the attribute, or nil if the attribute was not found. -func (r *Rule) DelAttr(key string) Expr { - list := r.Call.List - for i, kv := range list { - as, ok := kv.(*BinaryExpr) - if !ok || as.Op != "=" { - continue - } - k, ok := as.X.(*LiteralExpr) - if !ok || k.Token != key { - continue - } - copy(list[i:], list[i+1:]) - r.Call.List = list[:len(list)-1] - return as.Y - } - return nil -} - -// SetAttr sets the rule's attribute with the given key to value. -// If the rule has no attribute with the key, SetAttr appends -// one to the end of the rule's attribute list. -func (r *Rule) SetAttr(key string, val Expr) { - as := r.AttrDefn(key) - if as != nil { - as.Y = val - return - } - - r.Call.List = append(r.Call.List, - &BinaryExpr{ - X: &LiteralExpr{Token: key}, - Op: "=", - Y: val, - }, - ) -} - -// AttrLiteral returns the literal form of the rule's attribute -// with the given key (such as "cc_api_version"), only when -// that value is an identifier or number. -// If the rule has no such attribute or the attribute is not an identifier or number, -// AttrLiteral returns "". -func (r *Rule) AttrLiteral(key string) string { - lit, ok := r.Attr(key).(*LiteralExpr) - if !ok { - return "" - } - return lit.Token -} - -// AttrString returns the value of the rule's attribute -// with the given key (such as "name"), as a string. -// If the rule has no such attribute or the attribute has a non-string value, -// Attr returns the empty string. -func (r *Rule) AttrString(key string) string { - str, ok := r.Attr(key).(*StringExpr) - if !ok { - return "" - } - return str.Value -} - -// AttrStrings returns the value of the rule's attribute -// with the given key (such as "srcs"), as a []string. -// If the rule has no such attribute or the attribute is not -// a list of strings, AttrStrings returns a nil slice. -func (r *Rule) AttrStrings(key string) []string { - return Strings(r.Attr(key)) -} - -// Strings returns expr as a []string. -// If expr is not a list of string literals, -// Strings returns a nil slice instead. -// If expr is an empty list of string literals, -// returns a non-nil empty slice. -// (this allows differentiating between these two cases) -func Strings(expr Expr) []string { - list, ok := expr.(*ListExpr) - if !ok { - return nil - } - all := []string{} // not nil - for _, l := range list.List { - str, ok := l.(*StringExpr) - if !ok { - return nil - } - all = append(all, str.Value) - } - return all -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/syntax.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/syntax.go deleted file mode 100644 index e47341bd7d..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/syntax.go +++ /dev/null @@ -1,495 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -// Package build implements parsing and printing of BUILD files. -package build - -// Syntax data structure definitions. - -import ( - "strings" - "unicode/utf8" -) - -// A Position describes the position between two bytes of input. -type Position struct { - Line int // line in input (starting at 1) - LineRune int // rune in line (starting at 1) - Byte int // byte in input (starting at 0) -} - -// add returns the position at the end of s, assuming it starts at p. -func (p Position) add(s string) Position { - p.Byte += len(s) - if n := strings.Count(s, "\n"); n > 0 { - p.Line += n - s = s[strings.LastIndex(s, "\n")+1:] - p.LineRune = 1 - } - p.LineRune += utf8.RuneCountInString(s) - return p -} - -// An Expr represents an input element. -type Expr interface { - // Span returns the start and end position of the expression, - // excluding leading or trailing comments. - Span() (start, end Position) - - // Comment returns the comments attached to the expression. - // This method would normally be named 'Comments' but that - // would interfere with embedding a type of the same name. - Comment() *Comments -} - -// A Comment represents a single # comment. -type Comment struct { - Start Position - Token string // without trailing newline -} - -// Comments collects the comments associated with an expression. -type Comments struct { - Before []Comment // whole-line comments before this expression - Suffix []Comment // end-of-line comments after this expression - - // For top-level expressions only, After lists whole-line - // comments following the expression. - After []Comment -} - -// Comment returns the receiver. This isn't useful by itself, but -// a Comments struct is embedded into all the expression -// implementation types, and this gives each of those a Comment -// method to satisfy the Expr interface. -func (c *Comments) Comment() *Comments { - return c -} - -// A File represents an entire BUILD file. -type File struct { - Path string // file path, relative to workspace directory - Comments - Stmt []Expr -} - -func (f *File) Span() (start, end Position) { - if len(f.Stmt) == 0 { - return - } - start, _ = f.Stmt[0].Span() - _, end = f.Stmt[len(f.Stmt)-1].Span() - return start, end -} - -// A CommentBlock represents a top-level block of comments separate -// from any rule. -type CommentBlock struct { - Comments - Start Position -} - -func (x *CommentBlock) Span() (start, end Position) { - return x.Start, x.Start -} - -// A PythonBlock represents a blob of Python code, typically a def or for loop. -type PythonBlock struct { - Comments - Start Position - Token string // raw Python code, including final newline -} - -func (x *PythonBlock) Span() (start, end Position) { - return x.Start, x.Start.add(x.Token) -} - -// A LiteralExpr represents a literal identifier or number. 
-type LiteralExpr struct { - Comments - Start Position - Token string // identifier token -} - -func (x *LiteralExpr) Span() (start, end Position) { - return x.Start, x.Start.add(x.Token) -} - -// A StringExpr represents a single literal string. -type StringExpr struct { - Comments - Start Position - Value string // string value (decoded) - TripleQuote bool // triple quote output - End Position - - // To allow specific formatting of string literals, - // at least within our requirements, record the - // preferred form of Value. This field is a hint: - // it is only used if it is a valid quoted form for Value. - Token string -} - -func (x *StringExpr) Span() (start, end Position) { - return x.Start, x.End -} - -// An End represents the end of a parenthesized or bracketed expression. -// It is a place to hang comments. -type End struct { - Comments - Pos Position -} - -func (x *End) Span() (start, end Position) { - return x.Pos, x.Pos.add(")") -} - -// A CallExpr represents a function call expression: X(List). -type CallExpr struct { - Comments - X Expr - ListStart Position // position of ( - List []Expr - End // position of ) - ForceCompact bool // force compact (non-multiline) form when printing - ForceMultiLine bool // force multiline form when printing -} - -func (x *CallExpr) Span() (start, end Position) { - start, _ = x.X.Span() - return start, x.End.Pos.add(")") -} - -// A DotExpr represents a field selector: X.Name. -type DotExpr struct { - Comments - X Expr - Dot Position - NamePos Position - Name string -} - -func (x *DotExpr) Span() (start, end Position) { - start, _ = x.X.Span() - return start, x.NamePos.add(x.Name) -} - -// A ListForExpr represents a list comprehension expression: [X for ... if ...]. -type ListForExpr struct { - Comments - ForceMultiLine bool // split expression across multiple lines - Brack string // "", "()", or "[]" - Start Position - X Expr - For []*ForClauseWithIfClausesOpt - End -} - -func (x *ListForExpr) Span() (start, end Position) { - return x.Start, x.End.Pos.add("]") -} - -// A ForClause represents a for clause in a list comprehension: for Var in Expr. -type ForClause struct { - Comments - For Position - Var []Expr - In Position - Expr Expr -} - -func (x *ForClause) Span() (start, end Position) { - _, end = x.Expr.Span() - return x.For, end -} - -// An IfClause represents an if clause in a list comprehension: if Cond. -type IfClause struct { - Comments - If Position - Cond Expr -} - -func (x *IfClause) Span() (start, end Position) { - _, end = x.Cond.Span() - return x.If, end -} - -// A ForClauseWithIfClausesOpt represents a for clause in a list comprehension followed by optional -// if expressions: for ... in ... [if ... if ...] -type ForClauseWithIfClausesOpt struct { - Comments - For *ForClause - Ifs []*IfClause -} - -func (x *ForClauseWithIfClausesOpt) Span() (start, end Position) { - start, end = x.For.Span() - if len(x.Ifs) > 0 { - _, end = x.Ifs[len(x.Ifs)-1].Span() - } - - return start, end -} - -// A KeyValueExpr represents a dictionary entry: Key: Value. -type KeyValueExpr struct { - Comments - Key Expr - Colon Position - Value Expr -} - -func (x *KeyValueExpr) Span() (start, end Position) { - start, _ = x.Key.Span() - _, end = x.Value.Span() - return start, end -} - -// A DictExpr represents a dictionary literal: { List }. 
-type DictExpr struct { - Comments - Start Position - List []Expr // all *KeyValueExprs - Comma Position // position of trailing comma, if any - End - ForceMultiLine bool // force multiline form when printing -} - -func (x *DictExpr) Span() (start, end Position) { - return x.Start, x.End.Pos.add("}") -} - -// A ListExpr represents a list literal: [ List ]. -type ListExpr struct { - Comments - Start Position - List []Expr - Comma Position // position of trailing comma, if any - End - ForceMultiLine bool // force multiline form when printing -} - -func (x *ListExpr) Span() (start, end Position) { - return x.Start, x.End.Pos.add("]") -} - -// A SetExpr represents a set literal: { List }. -type SetExpr struct { - Comments - Start Position - List []Expr - Comma Position // position of trailing comma, if any - End - ForceMultiLine bool // force multiline form when printing -} - -func (x *SetExpr) Span() (start, end Position) { - return x.Start, x.End.Pos.add("}") -} - -// A TupleExpr represents a tuple literal: (List) -type TupleExpr struct { - Comments - Start Position - List []Expr - Comma Position // position of trailing comma, if any - End - ForceCompact bool // force compact (non-multiline) form when printing - ForceMultiLine bool // force multiline form when printing -} - -func (x *TupleExpr) Span() (start, end Position) { - return x.Start, x.End.Pos.add(")") -} - -// A UnaryExpr represents a unary expression: Op X. -type UnaryExpr struct { - Comments - OpStart Position - Op string - X Expr -} - -func (x *UnaryExpr) Span() (start, end Position) { - _, end = x.X.Span() - return x.OpStart, end -} - -// A BinaryExpr represents a binary expression: X Op Y. -type BinaryExpr struct { - Comments - X Expr - OpStart Position - Op string - LineBreak bool // insert line break between Op and Y - Y Expr -} - -func (x *BinaryExpr) Span() (start, end Position) { - start, _ = x.X.Span() - _, end = x.Y.Span() - return start, end -} - -// A ParenExpr represents a parenthesized expression: (X). -type ParenExpr struct { - Comments - Start Position - X Expr - End - ForceMultiLine bool // insert line break after opening ( and before closing ) -} - -func (x *ParenExpr) Span() (start, end Position) { - return x.Start, x.End.Pos.add(")") -} - -// A SliceExpr represents a slice expression: expr[from:to] or expr[from:to:step] . -type SliceExpr struct { - Comments - X Expr - SliceStart Position - From Expr - FirstColon Position - To Expr - SecondColon Position - Step Expr - End Position -} - -func (x *SliceExpr) Span() (start, end Position) { - start, _ = x.X.Span() - return start, x.End -} - -// An IndexExpr represents an index expression: X[Y]. -type IndexExpr struct { - Comments - X Expr - IndexStart Position - Y Expr - End Position -} - -func (x *IndexExpr) Span() (start, end Position) { - start, _ = x.X.Span() - return start, x.End -} - -// A LambdaExpr represents a lambda expression: lambda Var: Expr. -type LambdaExpr struct { - Comments - Lambda Position - Var []Expr - Colon Position - Expr Expr -} - -func (x *LambdaExpr) Span() (start, end Position) { - _, end = x.Expr.Span() - return x.Lambda, end -} - -// ConditionalExpr represents the conditional: X if TEST else ELSE. -type ConditionalExpr struct { - Comments - Then Expr - IfStart Position - Test Expr - ElseStart Position - Else Expr -} - -// Span returns the start and end position of the expression, -// excluding leading or trailing comments. 
-func (x *ConditionalExpr) Span() (start, end Position) { - start, _ = x.Then.Span() - _, end = x.Else.Span() - return start, end -} - -// A CodeBlock represents an indented code block. -type CodeBlock struct { - Statements []Expr - Start Position - End -} - -func (x *CodeBlock) Span() (start, end Position) { - return x.Start, x.End.Pos -} - -// A FuncDef represents a function definition expression: def foo(List):. -type FuncDef struct { - Comments - Start Position // position of def - Name string - ListStart Position // position of ( - Args []Expr - Body CodeBlock - End // position of the end - ForceCompact bool // force compact (non-multiline) form when printing - ForceMultiLine bool // force multiline form when printing -} - -func (x *FuncDef) Span() (start, end Position) { - return x.Start, x.End.Pos -} - -// A ReturnExpr represents a return statement: return f(x). -type ReturnExpr struct { - Comments - Start Position - X Expr - End Position -} - -func (x *ReturnExpr) Span() (start, end Position) { - return x.Start, x.End -} - -// A ForLoop represents a for loop block: for x in range(10):. -type ForLoop struct { - Comments - Start Position // position of for - LoopVars []Expr - Iterable Expr - Body CodeBlock - End // position of the end -} - -func (x *ForLoop) Span() (start, end Position) { - return x.Start, x.End.Pos -} - -// An IfElse represents an if-else blocks sequence: if x: ... elif y: ... else: ... . -type IfElse struct { - Comments - Start Position // position of if - Conditions []Condition - End // position of the end -} - -type Condition struct { - If Expr - Then CodeBlock -} - -func (x *IfElse) Span() (start, end Position) { - return x.Start, x.End.Pos -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/walk.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/walk.go deleted file mode 100644 index dadd7e3e47..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/build/walk.go +++ /dev/null @@ -1,135 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -package build - -// Walk walks the expression tree v, calling f on all subexpressions -// in a preorder traversal. -// -// The stk argument is the stack of expressions in the recursion above x, -// from outermost to innermost. -// -func Walk(v Expr, f func(x Expr, stk []Expr)) { - var stack []Expr - walk1(&v, &stack, func(x Expr, stk []Expr) Expr { - f(x, stk) - return nil - }) -} - -// WalkAndUpdate walks the expression tree v, calling f on all subexpressions -// in a preorder traversal. If f returns a non-nil value, the tree is mutated. -// The new value replaces the old one. -// -// The stk argument is the stack of expressions in the recursion above x, -// from outermost to innermost. -// -func Edit(v Expr, f func(x Expr, stk []Expr) Expr) Expr { - var stack []Expr - return walk1(&v, &stack, f) -} - -// walk1 is the actual implementation of Walk and WalkAndUpdate. 
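Walk and Edit are the generic traversal entry points the rewriter above is built on. A small library-style sketch that counts string literals in a parsed file (the *build.File would again come from the parser, which is outside this hunk):

```go
package rules

import "github.com/bazelbuild/buildtools/build"

// countStrings does a preorder walk of the whole file and counts string
// literals; the stack argument is unused here but identifies the enclosing
// expressions when a callback needs context.
func countStrings(f *build.File) int {
	n := 0
	build.Walk(f, func(x build.Expr, stk []build.Expr) {
		if _, ok := x.(*build.StringExpr); ok {
			n++
		}
	})
	return n
}
```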
-// It has the same signature and meaning as Walk, -// except that it maintains in *stack the current stack -// of nodes. Using a pointer to a slice here ensures that -// as the stack grows and shrinks the storage can be -// reused for the next growth. -func walk1(v *Expr, stack *[]Expr, f func(x Expr, stk []Expr) Expr) Expr { - if v == nil { - return nil - } - - if res := f(*v, *stack); res != nil { - *v = res - } - *stack = append(*stack, *v) - switch v := (*v).(type) { - case *File: - for _, stmt := range v.Stmt { - walk1(&stmt, stack, f) - } - case *DotExpr: - walk1(&v.X, stack, f) - case *IndexExpr: - walk1(&v.X, stack, f) - walk1(&v.Y, stack, f) - case *KeyValueExpr: - walk1(&v.Key, stack, f) - walk1(&v.Value, stack, f) - case *SliceExpr: - walk1(&v.X, stack, f) - if v.From != nil { - walk1(&v.From, stack, f) - } - if v.To != nil { - walk1(&v.To, stack, f) - } - if v.Step != nil { - walk1(&v.Step, stack, f) - } - case *ParenExpr: - walk1(&v.X, stack, f) - case *UnaryExpr: - walk1(&v.X, stack, f) - case *BinaryExpr: - walk1(&v.X, stack, f) - walk1(&v.Y, stack, f) - case *LambdaExpr: - for i := range v.Var { - walk1(&v.Var[i], stack, f) - } - walk1(&v.Expr, stack, f) - case *CallExpr: - walk1(&v.X, stack, f) - for i := range v.List { - walk1(&v.List[i], stack, f) - } - case *ListExpr: - for i := range v.List { - walk1(&v.List[i], stack, f) - } - case *SetExpr: - for i := range v.List { - walk1(&v.List[i], stack, f) - } - case *TupleExpr: - for i := range v.List { - walk1(&v.List[i], stack, f) - } - case *DictExpr: - for i := range v.List { - walk1(&v.List[i], stack, f) - } - case *ListForExpr: - walk1(&v.X, stack, f) - for _, c := range v.For { - for j := range c.For.Var { - walk1(&c.For.Var[j], stack, f) - } - walk1(&c.For.Expr, stack, f) - for _, i := range c.Ifs { - walk1(&i.Cond, stack, f) - } - } - case *ConditionalExpr: - walk1(&v.Then, stack, f) - walk1(&v.Test, stack, f) - walk1(&v.Else, stack, f) - } - *stack = (*stack)[:len(*stack)-1] - return *v -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel deleted file mode 100644 index b6d904dffa..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/BUILD.bazel +++ /dev/null @@ -1,12 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "jsonparser.go", - "tables.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/bazelbuild/buildtools/tables", - importpath = "github.com/bazelbuild/buildtools/tables", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go deleted file mode 100644 index ca2bc4443a..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/jsonparser.go +++ /dev/null @@ -1,63 +0,0 @@ -/* -Copyright 2017 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. -*/ - -package tables - -import ( - "encoding/json" - "io/ioutil" -) - -type Definitions struct { - IsLabelArg map[string]bool - LabelBlacklist map[string]bool - IsListArg map[string]bool - IsSortableListArg map[string]bool - SortableBlacklist map[string]bool - SortableWhitelist map[string]bool - NamePriority map[string]int - StripLabelLeadingSlashes bool - ShortenAbsoluteLabelsToRelative bool -} - -// ParseJSONDefinitions reads and parses JSON table definitions from file. -func ParseJSONDefinitions(file string) (Definitions, error) { - var definitions Definitions - - data, err := ioutil.ReadFile(file) - if err != nil { - return definitions, err - } - - err = json.Unmarshal(data, &definitions) - return definitions, err -} - -// ParseAndUpdateJSONDefinitions reads definitions from file and merges or -// overrides the values in memory. -func ParseAndUpdateJSONDefinitions(file string, merge bool) error { - definitions, err := ParseJSONDefinitions(file) - if err != nil { - return err - } - - if merge { - MergeTables(definitions.IsLabelArg, definitions.LabelBlacklist, definitions.IsListArg, definitions.IsSortableListArg, definitions.SortableBlacklist, definitions.SortableWhitelist, definitions.NamePriority, definitions.StripLabelLeadingSlashes, definitions.ShortenAbsoluteLabelsToRelative) - } else { - OverrideTables(definitions.IsLabelArg, definitions.LabelBlacklist, definitions.IsListArg, definitions.IsSortableListArg, definitions.SortableBlacklist, definitions.SortableWhitelist, definitions.NamePriority, definitions.StripLabelLeadingSlashes, definitions.ShortenAbsoluteLabelsToRelative) - } - return nil -} diff --git a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/tables.go b/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/tables.go deleted file mode 100644 index 6e0f6b2ba8..0000000000 --- a/vendor/repo-infra/vendor/github.com/bazelbuild/buildtools/tables/tables.go +++ /dev/null @@ -1,248 +0,0 @@ -/* -Copyright 2016 Google Inc. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -// Tables about what Buildifier can and cannot edit. -// Perhaps eventually this will be -// derived from the BUILD encyclopedia. - -package tables - -// IsLabelArg: a named argument to a rule call is considered to have a value -// that can be treated as a label or list of labels if the name -// is one of these names. There is a separate blacklist for -// rule-specific exceptions. 
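The Definitions struct in the jsonparser.go hunk above mirrors the package-level tables, so a deployment can ship its own JSON file and load it at startup. A hedged sketch (the file name and its contents are invented for illustration):

```go
package main

import (
	"log"

	"github.com/bazelbuild/buildtools/tables"
)

func main() {
	// The JSON keys follow the Definitions struct, e.g.
	// {"IsSortableListArg": {"extra_deps": true}, "StripLabelLeadingSlashes": true}.
	// Passing merge=true folds the values into the built-in tables instead
	// of replacing them wholesale.
	if err := tables.ParseAndUpdateJSONDefinitions("buildifier-tables.json", true); err != nil {
		log.Fatalf("loading table overrides: %v", err)
	}
}
```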
-var IsLabelArg = map[string]bool{ - "app_target": true, - "appdir": true, - "base_package": true, - "build_deps": true, - "cc_deps": true, - "ccdeps": true, - "common_deps": true, - "compile_deps": true, - "compiler": true, - "data": true, - "default_visibility": true, - "dep": true, - "deps": true, - "deps_java": true, - "dont_depend_on": true, - "env_deps": true, - "envscripts": true, - "exported_deps": true, - "exports": true, - "externs_list": true, - "files": true, - "globals": true, - "implementation": true, - "implements": true, - "includes": true, - "interface": true, - "jar": true, - "jars": true, - "javadeps": true, - "lib_deps": true, - "library": true, - "malloc": true, - "model": true, - "mods": true, - "module_deps": true, - "module_target": true, - "of": true, - "plugins": true, - "proto_deps": true, - "proto_target": true, - "protos": true, - "resource": true, - "resources": true, - "runtime_deps": true, - "scope": true, - "shared_deps": true, - "similar_deps": true, - "source_jar": true, - "src": true, - "srcs": true, - "stripped_targets": true, - "suites": true, - "swigdeps": true, - "target": true, - "target_devices": true, - "target_platforms": true, - "template": true, - "test": true, - "tests": true, - "tests_deps": true, - "tool": true, - "tools": true, - "visibility": true, -} - -// LabelBlacklist is the list of call arguments that cannot be -// shortened, because they are not interpreted using the same -// rules as for other labels. -var LabelBlacklist = map[string]bool{ - // Shortening this can cause visibility checks to fail. - "package_group.includes": true, -} - -// By default, edit.types.IsList consults lang.TypeOf to determine if an arg is a list. -// You may override this using IsListArg. Specifying a name here overrides any value -// in lang.TypeOf. -var IsListArg = map[string]bool{} - -// IsSortableListArg: a named argument to a rule call is considered to be a sortable list -// if the name is one of these names. There is a separate blacklist for -// rule-specific exceptions. -var IsSortableListArg = map[string]bool{ - "cc_deps": true, - "common_deps": true, - "compile_deps": true, - "configs": true, - "constraints": true, - "data": true, - "default_visibility": true, - "deps": true, - "deps_java": true, - "exported_deps": true, - "exports": true, - "filegroups": true, - "files": true, - "hdrs": true, - "imports": true, - "includes": true, - "inherits": true, - "javadeps": true, - "lib_deps": true, - "module_deps": true, - "out": true, - "outs": true, - "packages": true, - "plugin_modules": true, - "proto_deps": true, - "protos": true, - "pubs": true, - "resources": true, - "runtime_deps": true, - "shared_deps": true, - "similar_deps": true, - "srcs": true, - "swigdeps": true, - "swig_includes": true, - "tags": true, - "tests": true, - "tools": true, - "to_start_extensions": true, - "visibility": true, -} - -// SortableBlacklist records specific rule arguments that must not be reordered. -var SortableBlacklist = map[string]bool{ - "genrule.outs": true, - "genrule.srcs": true, -} - -// SortableWhitelist records specific rule arguments that are guaranteed -// to be reorderable, because bazel re-sorts the list itself after reading the BUILD file. 
-var SortableWhitelist = map[string]bool{ - "cc_inc_library.hdrs": true, - "cc_library.hdrs": true, - "java_library.srcs": true, - "java_library.resources": true, - "java_binary.srcs": true, - "java_binary.resources": true, - "java_test.srcs": true, - "java_test.resources": true, - "java_library.constraints": true, - "java_import.constraints": true, -} - -// NamePriority maps an argument name to its sorting priority. -// -// NOTE(bazel-team): These are the old buildifier rules. It is likely that this table -// will change, perhaps swapping in a separate table for each call, -// derived from the order used in the Build Encyclopedia. -var NamePriority = map[string]int{ - "name": -99, - "gwt_name": -98, - "package_name": -97, - "visible_node_name": -96, // for boq_initial_css_modules and boq_jswire_test_suite - "size": -95, - "timeout": -94, - "testonly": -93, - "src": -92, - "srcdir": -91, - "srcs": -90, - "out": -89, - "outs": -88, - "hdrs": -87, - "has_services": -86, // before api versions, for proto - "include": -85, // before exclude, for glob - "of": -84, // for check_dependencies - "baseline": -83, // for searchbox_library - // All others sort here, at 0. - "destdir": 1, - "exports": 2, - "runtime_deps": 3, - "deps": 4, - "implementation": 5, - "implements": 6, - "alwayslink": 7, -} - -var StripLabelLeadingSlashes = false - -var ShortenAbsoluteLabelsToRelative = false - -var FormatBzlFiles = false - -// OverrideTables allows a user of the build package to override the special-case rules. The user-provided tables replace the built-in tables. -func OverrideTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) { - IsLabelArg = labelArg - LabelBlacklist = blacklist - IsListArg = listArg - IsSortableListArg = sortableListArg - SortableBlacklist = sortBlacklist - SortableWhitelist = sortWhitelist - NamePriority = namePriority - StripLabelLeadingSlashes = stripLabelLeadingSlashes - ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative -} - -// MergeTables allows a user of the build package to override the special-case rules. The user-provided tables are merged into the built-in tables. 
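Callers that build their overrides in code rather than JSON can call MergeTables (or OverrideTables) directly; nil is acceptable for any table they do not want to touch, since ranging over a nil map adds nothing. A minimal sketch, with a made-up attribute name:

```go
package main

import "github.com/bazelbuild/buildtools/tables"

func main() {
	// Merge one extra sortable attribute into the built-in tables; the
	// other arguments are left nil/false so the existing entries survive.
	tables.MergeTables(
		nil, nil, nil,
		map[string]bool{"extra_deps": true}, // IsSortableListArg additions
		nil, nil,
		nil,          // NamePriority
		false, false, // StripLabelLeadingSlashes, ShortenAbsoluteLabelsToRelative
	)
}
```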
-func MergeTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) { - for k, v := range labelArg { - IsLabelArg[k] = v - } - for k, v := range blacklist { - LabelBlacklist[k] = v - } - for k, v := range listArg { - IsListArg[k] = v - } - for k, v := range sortableListArg { - IsSortableListArg[k] = v - } - for k, v := range sortBlacklist { - SortableBlacklist[k] = v - } - for k, v := range sortWhitelist { - SortableWhitelist[k] = v - } - for k, v := range namePriority { - NamePriority[k] = v - } - StripLabelLeadingSlashes = stripLabelLeadingSlashes || StripLabelLeadingSlashes - ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative || ShortenAbsoluteLabelsToRelative -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/BUILD.bazel b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/BUILD.bazel deleted file mode 100644 index 994aa2e0fd..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/BUILD.bazel +++ /dev/null @@ -1,20 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "doc.go", - "keysparsing.go", - "lexer.go", - "marshal.go", - "parser.go", - "position.go", - "token.go", - "toml.go", - "tomltree_create.go", - "tomltree_write.go", - ], - importmap = "k8s.io/repo-infra/vendor/github.com/pelletier/go-toml", - importpath = "github.com/pelletier/go-toml", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/LICENSE b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/LICENSE deleted file mode 100644 index 583bdae628..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/doc.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/doc.go deleted file mode 100644 index d5fd98c021..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/doc.go +++ /dev/null @@ -1,23 +0,0 @@ -// Package toml is a TOML parser and manipulation library. 
-// -// This version supports the specification as described in -// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md -// -// Marshaling -// -// Go-toml can marshal and unmarshal TOML documents from and to data -// structures. -// -// TOML document as a tree -// -// Go-toml can operate on a TOML document as a tree. Use one of the Load* -// functions to parse TOML data and obtain a Tree instance, then one of its -// methods to manipulate the tree. -// -// JSONPath-like queries -// -// The package github.com/pelletier/go-toml/query implements a system -// similar to JSONPath to quickly retrieve elements of a TOML document using a -// single expression. See the package documentation for more information. -// -package toml diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/fuzz.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/fuzz.go deleted file mode 100644 index 14570c8d35..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/fuzz.go +++ /dev/null @@ -1,31 +0,0 @@ -// +build gofuzz - -package toml - -func Fuzz(data []byte) int { - tree, err := LoadBytes(data) - if err != nil { - if tree != nil { - panic("tree must be nil if there is an error") - } - return 0 - } - - str, err := tree.ToTomlString() - if err != nil { - if str != "" { - panic(`str must be "" if there is an error`) - } - panic(err) - } - - tree, err = Load(str) - if err != nil { - if tree != nil { - panic("tree must be nil if there is an error") - } - return 0 - } - - return 1 -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/keysparsing.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/keysparsing.go deleted file mode 100644 index 284db64678..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/keysparsing.go +++ /dev/null @@ -1,85 +0,0 @@ -// Parsing keys handling both bare and quoted keys. - -package toml - -import ( - "bytes" - "errors" - "fmt" - "unicode" -) - -// Convert the bare key group string to an array. -// The input supports double quotation to allow "." inside the key name, -// but escape sequences are not supported. Lexers must unescape them beforehand. 
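The key-parsing rules above are what let a quoted segment keep its internal dot. A hedged sketch through the package's public surface (Load and Tree.GetPath are assumed from files outside this diff):

```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// The quoted key keeps its internal dot, so the resulting path is
	// ["server", "http.port"] rather than three segments.
	tree, err := toml.Load("[server]\n\"http.port\" = 8080\n")
	if err != nil {
		panic(err)
	}
	fmt.Println(tree.GetPath([]string{"server", "http.port"})) // 8080
}
```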
-func parseKey(key string) ([]string, error) { - groups := []string{} - var buffer bytes.Buffer - inQuotes := false - wasInQuotes := false - ignoreSpace := true - expectDot := false - - for _, char := range key { - if ignoreSpace { - if char == ' ' { - continue - } - ignoreSpace = false - } - switch char { - case '"': - if inQuotes { - groups = append(groups, buffer.String()) - buffer.Reset() - wasInQuotes = true - } - inQuotes = !inQuotes - expectDot = false - case '.': - if inQuotes { - buffer.WriteRune(char) - } else { - if !wasInQuotes { - if buffer.Len() == 0 { - return nil, errors.New("empty table key") - } - groups = append(groups, buffer.String()) - buffer.Reset() - } - ignoreSpace = true - expectDot = false - wasInQuotes = false - } - case ' ': - if inQuotes { - buffer.WriteRune(char) - } else { - expectDot = true - } - default: - if !inQuotes && !isValidBareChar(char) { - return nil, fmt.Errorf("invalid bare character: %c", char) - } - if !inQuotes && expectDot { - return nil, errors.New("what?") - } - buffer.WriteRune(char) - expectDot = false - } - } - if inQuotes { - return nil, errors.New("mismatched quotes") - } - if buffer.Len() > 0 { - groups = append(groups, buffer.String()) - } - if len(groups) == 0 { - return nil, errors.New("empty key") - } - return groups, nil -} - -func isValidBareChar(r rune) bool { - return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r) -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/lexer.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/lexer.go deleted file mode 100644 index d11de42859..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/lexer.go +++ /dev/null @@ -1,750 +0,0 @@ -// TOML lexer. -// -// Written using the principles developed by Rob Pike in -// http://www.youtube.com/watch?v=HxaD_trXwRE - -package toml - -import ( - "bytes" - "errors" - "fmt" - "regexp" - "strconv" - "strings" -) - -var dateRegexp *regexp.Regexp - -// Define state functions -type tomlLexStateFn func() tomlLexStateFn - -// Define lexer -type tomlLexer struct { - inputIdx int - input []rune // Textual source - currentTokenStart int - currentTokenStop int - tokens []token - depth int - line int - col int - endbufferLine int - endbufferCol int -} - -// Basic read operations on input - -func (l *tomlLexer) read() rune { - r := l.peek() - if r == '\n' { - l.endbufferLine++ - l.endbufferCol = 1 - } else { - l.endbufferCol++ - } - l.inputIdx++ - return r -} - -func (l *tomlLexer) next() rune { - r := l.read() - - if r != eof { - l.currentTokenStop++ - } - return r -} - -func (l *tomlLexer) ignore() { - l.currentTokenStart = l.currentTokenStop - l.line = l.endbufferLine - l.col = l.endbufferCol -} - -func (l *tomlLexer) skip() { - l.next() - l.ignore() -} - -func (l *tomlLexer) fastForward(n int) { - for i := 0; i < n; i++ { - l.next() - } -} - -func (l *tomlLexer) emitWithValue(t tokenType, value string) { - l.tokens = append(l.tokens, token{ - Position: Position{l.line, l.col}, - typ: t, - val: value, - }) - l.ignore() -} - -func (l *tomlLexer) emit(t tokenType) { - l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop])) -} - -func (l *tomlLexer) peek() rune { - if l.inputIdx >= len(l.input) { - return eof - } - return l.input[l.inputIdx] -} - -func (l *tomlLexer) peekString(size int) string { - maxIdx := len(l.input) - upperIdx := l.inputIdx + size // FIXME: potential overflow - if upperIdx > maxIdx { - upperIdx = maxIdx - } - return string(l.input[l.inputIdx:upperIdx]) -} - -func (l *tomlLexer) 
follow(next string) bool { - return next == l.peekString(len(next)) -} - -// Error management - -func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn { - l.tokens = append(l.tokens, token{ - Position: Position{l.line, l.col}, - typ: tokenError, - val: fmt.Sprintf(format, args...), - }) - return nil -} - -// State functions - -func (l *tomlLexer) lexVoid() tomlLexStateFn { - for { - next := l.peek() - switch next { - case '[': - return l.lexTableKey - case '#': - return l.lexComment(l.lexVoid) - case '=': - return l.lexEqual - case '\r': - fallthrough - case '\n': - l.skip() - continue - } - - if isSpace(next) { - l.skip() - } - - if l.depth > 0 { - return l.lexRvalue - } - - if isKeyStartChar(next) { - return l.lexKey - } - - if next == eof { - l.next() - break - } - } - - l.emit(tokenEOF) - return nil -} - -func (l *tomlLexer) lexRvalue() tomlLexStateFn { - for { - next := l.peek() - switch next { - case '.': - return l.errorf("cannot start float with a dot") - case '=': - return l.lexEqual - case '[': - l.depth++ - return l.lexLeftBracket - case ']': - l.depth-- - return l.lexRightBracket - case '{': - return l.lexLeftCurlyBrace - case '}': - return l.lexRightCurlyBrace - case '#': - return l.lexComment(l.lexRvalue) - case '"': - return l.lexString - case '\'': - return l.lexLiteralString - case ',': - return l.lexComma - case '\r': - fallthrough - case '\n': - l.skip() - if l.depth == 0 { - return l.lexVoid - } - return l.lexRvalue - case '_': - return l.errorf("cannot start number with underscore") - } - - if l.follow("true") { - return l.lexTrue - } - - if l.follow("false") { - return l.lexFalse - } - - if l.follow("inf") { - return l.lexInf - } - - if l.follow("nan") { - return l.lexNan - } - - if isSpace(next) { - l.skip() - continue - } - - if next == eof { - l.next() - break - } - - possibleDate := l.peekString(35) - dateMatch := dateRegexp.FindString(possibleDate) - if dateMatch != "" { - l.fastForward(len(dateMatch)) - return l.lexDate - } - - if next == '+' || next == '-' || isDigit(next) { - return l.lexNumber - } - - if isAlphanumeric(next) { - return l.lexKey - } - - return l.errorf("no value can start with %c", next) - } - - l.emit(tokenEOF) - return nil -} - -func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn { - l.next() - l.emit(tokenLeftCurlyBrace) - return l.lexRvalue -} - -func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn { - l.next() - l.emit(tokenRightCurlyBrace) - return l.lexRvalue -} - -func (l *tomlLexer) lexDate() tomlLexStateFn { - l.emit(tokenDate) - return l.lexRvalue -} - -func (l *tomlLexer) lexTrue() tomlLexStateFn { - l.fastForward(4) - l.emit(tokenTrue) - return l.lexRvalue -} - -func (l *tomlLexer) lexFalse() tomlLexStateFn { - l.fastForward(5) - l.emit(tokenFalse) - return l.lexRvalue -} - -func (l *tomlLexer) lexInf() tomlLexStateFn { - l.fastForward(3) - l.emit(tokenInf) - return l.lexRvalue -} - -func (l *tomlLexer) lexNan() tomlLexStateFn { - l.fastForward(3) - l.emit(tokenNan) - return l.lexRvalue -} - -func (l *tomlLexer) lexEqual() tomlLexStateFn { - l.next() - l.emit(tokenEqual) - return l.lexRvalue -} - -func (l *tomlLexer) lexComma() tomlLexStateFn { - l.next() - l.emit(tokenComma) - return l.lexRvalue -} - -// Parse the key and emits its value without escape sequences. -// bare keys, basic string keys and literal string keys are supported. 
-func (l *tomlLexer) lexKey() tomlLexStateFn { - growingString := "" - - for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() { - if r == '"' { - l.next() - str, err := l.lexStringAsString(`"`, false, true) - if err != nil { - return l.errorf(err.Error()) - } - growingString += str - l.next() - continue - } else if r == '\'' { - l.next() - str, err := l.lexLiteralStringAsString(`'`, false) - if err != nil { - return l.errorf(err.Error()) - } - growingString += str - l.next() - continue - } else if r == '\n' { - return l.errorf("keys cannot contain new lines") - } else if isSpace(r) { - break - } else if !isValidBareChar(r) { - return l.errorf("keys cannot contain %c character", r) - } - growingString += string(r) - l.next() - } - l.emitWithValue(tokenKey, growingString) - return l.lexVoid -} - -func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn { - return func() tomlLexStateFn { - for next := l.peek(); next != '\n' && next != eof; next = l.peek() { - if next == '\r' && l.follow("\r\n") { - break - } - l.next() - } - l.ignore() - return previousState - } -} - -func (l *tomlLexer) lexLeftBracket() tomlLexStateFn { - l.next() - l.emit(tokenLeftBracket) - return l.lexRvalue -} - -func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) { - growingString := "" - - if discardLeadingNewLine { - if l.follow("\r\n") { - l.skip() - l.skip() - } else if l.peek() == '\n' { - l.skip() - } - } - - // find end of string - for { - if l.follow(terminator) { - return growingString, nil - } - - next := l.peek() - if next == eof { - break - } - growingString += string(l.next()) - } - - return "", errors.New("unclosed string") -} - -func (l *tomlLexer) lexLiteralString() tomlLexStateFn { - l.skip() - - // handle special case for triple-quote - terminator := "'" - discardLeadingNewLine := false - if l.follow("''") { - l.skip() - l.skip() - terminator = "'''" - discardLeadingNewLine = true - } - - str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine) - if err != nil { - return l.errorf(err.Error()) - } - - l.emitWithValue(tokenString, str) - l.fastForward(len(terminator)) - l.ignore() - return l.lexRvalue -} - -// Lex a string and return the results as a string. -// Terminator is the substring indicating the end of the token. -// The resulting string does not include the terminator. 
-func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) { - growingString := "" - - if discardLeadingNewLine { - if l.follow("\r\n") { - l.skip() - l.skip() - } else if l.peek() == '\n' { - l.skip() - } - } - - for { - if l.follow(terminator) { - return growingString, nil - } - - if l.follow("\\") { - l.next() - switch l.peek() { - case '\r': - fallthrough - case '\n': - fallthrough - case '\t': - fallthrough - case ' ': - // skip all whitespace chars following backslash - for strings.ContainsRune("\r\n\t ", l.peek()) { - l.next() - } - case '"': - growingString += "\"" - l.next() - case 'n': - growingString += "\n" - l.next() - case 'b': - growingString += "\b" - l.next() - case 'f': - growingString += "\f" - l.next() - case '/': - growingString += "/" - l.next() - case 't': - growingString += "\t" - l.next() - case 'r': - growingString += "\r" - l.next() - case '\\': - growingString += "\\" - l.next() - case 'u': - l.next() - code := "" - for i := 0; i < 4; i++ { - c := l.peek() - if !isHexDigit(c) { - return "", errors.New("unfinished unicode escape") - } - l.next() - code = code + string(c) - } - intcode, err := strconv.ParseInt(code, 16, 32) - if err != nil { - return "", errors.New("invalid unicode escape: \\u" + code) - } - growingString += string(rune(intcode)) - case 'U': - l.next() - code := "" - for i := 0; i < 8; i++ { - c := l.peek() - if !isHexDigit(c) { - return "", errors.New("unfinished unicode escape") - } - l.next() - code = code + string(c) - } - intcode, err := strconv.ParseInt(code, 16, 64) - if err != nil { - return "", errors.New("invalid unicode escape: \\U" + code) - } - growingString += string(rune(intcode)) - default: - return "", errors.New("invalid escape sequence: \\" + string(l.peek())) - } - } else { - r := l.peek() - - if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) { - return "", fmt.Errorf("unescaped control character %U", r) - } - l.next() - growingString += string(r) - } - - if l.peek() == eof { - break - } - } - - return "", errors.New("unclosed string") -} - -func (l *tomlLexer) lexString() tomlLexStateFn { - l.skip() - - // handle special case for triple-quote - terminator := `"` - discardLeadingNewLine := false - acceptNewLines := false - if l.follow(`""`) { - l.skip() - l.skip() - terminator = `"""` - discardLeadingNewLine = true - acceptNewLines = true - } - - str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines) - - if err != nil { - return l.errorf(err.Error()) - } - - l.emitWithValue(tokenString, str) - l.fastForward(len(terminator)) - l.ignore() - return l.lexRvalue -} - -func (l *tomlLexer) lexTableKey() tomlLexStateFn { - l.next() - - if l.peek() == '[' { - // token '[[' signifies an array of tables - l.next() - l.emit(tokenDoubleLeftBracket) - return l.lexInsideTableArrayKey - } - // vanilla table key - l.emit(tokenLeftBracket) - return l.lexInsideTableKey -} - -// Parse the key till "]]", but only bare keys are supported -func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn { - for r := l.peek(); r != eof; r = l.peek() { - switch r { - case ']': - if l.currentTokenStop > l.currentTokenStart { - l.emit(tokenKeyGroupArray) - } - l.next() - if l.peek() != ']' { - break - } - l.next() - l.emit(tokenDoubleRightBracket) - return l.lexVoid - case '[': - return l.errorf("table array key cannot contain ']'") - default: - l.next() - } - } - return l.errorf("unclosed table array key") -} - -// Parse the key till "]" but only 
bare keys are supported -func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn { - for r := l.peek(); r != eof; r = l.peek() { - switch r { - case ']': - if l.currentTokenStop > l.currentTokenStart { - l.emit(tokenKeyGroup) - } - l.next() - l.emit(tokenRightBracket) - return l.lexVoid - case '[': - return l.errorf("table key cannot contain ']'") - default: - l.next() - } - } - return l.errorf("unclosed table key") -} - -func (l *tomlLexer) lexRightBracket() tomlLexStateFn { - l.next() - l.emit(tokenRightBracket) - return l.lexRvalue -} - -type validRuneFn func(r rune) bool - -func isValidHexRune(r rune) bool { - return r >= 'a' && r <= 'f' || - r >= 'A' && r <= 'F' || - r >= '0' && r <= '9' || - r == '_' -} - -func isValidOctalRune(r rune) bool { - return r >= '0' && r <= '7' || r == '_' -} - -func isValidBinaryRune(r rune) bool { - return r == '0' || r == '1' || r == '_' -} - -func (l *tomlLexer) lexNumber() tomlLexStateFn { - r := l.peek() - - if r == '0' { - follow := l.peekString(2) - if len(follow) == 2 { - var isValidRune validRuneFn - switch follow[1] { - case 'x': - isValidRune = isValidHexRune - case 'o': - isValidRune = isValidOctalRune - case 'b': - isValidRune = isValidBinaryRune - default: - if follow[1] >= 'a' && follow[1] <= 'z' || follow[1] >= 'A' && follow[1] <= 'Z' { - return l.errorf("unknown number base: %s. possible options are x (hex) o (octal) b (binary)", string(follow[1])) - } - } - - if isValidRune != nil { - l.next() - l.next() - digitSeen := false - for { - next := l.peek() - if !isValidRune(next) { - break - } - digitSeen = true - l.next() - } - - if !digitSeen { - return l.errorf("number needs at least one digit") - } - - l.emit(tokenInteger) - - return l.lexRvalue - } - } - } - - if r == '+' || r == '-' { - l.next() - if l.follow("inf") { - return l.lexInf - } - if l.follow("nan") { - return l.lexNan - } - } - - pointSeen := false - expSeen := false - digitSeen := false - for { - next := l.peek() - if next == '.' 
{ - if pointSeen { - return l.errorf("cannot have two dots in one float") - } - l.next() - if !isDigit(l.peek()) { - return l.errorf("float cannot end with a dot") - } - pointSeen = true - } else if next == 'e' || next == 'E' { - expSeen = true - l.next() - r := l.peek() - if r == '+' || r == '-' { - l.next() - } - } else if isDigit(next) { - digitSeen = true - l.next() - } else if next == '_' { - l.next() - } else { - break - } - if pointSeen && !digitSeen { - return l.errorf("cannot start float with a dot") - } - } - - if !digitSeen { - return l.errorf("no digit in that number") - } - if pointSeen || expSeen { - l.emit(tokenFloat) - } else { - l.emit(tokenInteger) - } - return l.lexRvalue -} - -func (l *tomlLexer) run() { - for state := l.lexVoid; state != nil; { - state = state() - } -} - -func init() { - dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`) -} - -// Entry point -func lexToml(inputBytes []byte) []token { - runes := bytes.Runes(inputBytes) - l := &tomlLexer{ - input: runes, - tokens: make([]token, 0, 256), - line: 1, - col: 1, - endbufferLine: 1, - endbufferCol: 1, - } - l.run() - return l.tokens -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/marshal.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/marshal.go deleted file mode 100644 index b5a241505b..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/marshal.go +++ /dev/null @@ -1,600 +0,0 @@ -package toml - -import ( - "bytes" - "errors" - "fmt" - "io" - "reflect" - "strconv" - "strings" - "time" -) - -type tomlOpts struct { - name string - comment string - commented bool - include bool - omitempty bool -} - -type encOpts struct { - quoteMapKeys bool - arraysOneElementPerLine bool -} - -var encOptsDefaults = encOpts{ - quoteMapKeys: false, -} - -var timeType = reflect.TypeOf(time.Time{}) -var marshalerType = reflect.TypeOf(new(Marshaler)).Elem() - -// Check if the given marshall type maps to a Tree primitive -func isPrimitive(mtype reflect.Type) bool { - switch mtype.Kind() { - case reflect.Ptr: - return isPrimitive(mtype.Elem()) - case reflect.Bool: - return true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return true - case reflect.Float32, reflect.Float64: - return true - case reflect.String: - return true - case reflect.Struct: - return mtype == timeType || isCustomMarshaler(mtype) - default: - return false - } -} - -// Check if the given marshall type maps to a Tree slice -func isTreeSlice(mtype reflect.Type) bool { - switch mtype.Kind() { - case reflect.Slice: - return !isOtherSlice(mtype) - default: - return false - } -} - -// Check if the given marshall type maps to a non-Tree slice -func isOtherSlice(mtype reflect.Type) bool { - switch mtype.Kind() { - case reflect.Ptr: - return isOtherSlice(mtype.Elem()) - case reflect.Slice: - return isPrimitive(mtype.Elem()) || isOtherSlice(mtype.Elem()) - default: - return false - } -} - -// Check if the given marshall type maps to a Tree -func isTree(mtype reflect.Type) bool { - switch mtype.Kind() { - case reflect.Map: - return true - case reflect.Struct: - return !isPrimitive(mtype) - default: - return false - } -} - -func isCustomMarshaler(mtype reflect.Type) bool { - return mtype.Implements(marshalerType) -} - -func callCustomMarshaler(mval reflect.Value) ([]byte, error) { - return mval.Interface().(Marshaler).MarshalTOML() -} - -// 
Marshaler is the interface implemented by types that -// can marshal themselves into valid TOML. -type Marshaler interface { - MarshalTOML() ([]byte, error) -} - -/* -Marshal returns the TOML encoding of v. Behavior is similar to the Go json -encoder, except that there is no concept of a Marshaler interface or MarshalTOML -function for sub-structs, and currently only definite types can be marshaled -(i.e. no `interface{}`). - -The following struct annotations are supported: - - toml:"Field" Overrides the field's name to output. - omitempty When set, empty values and groups are not emitted. - comment:"comment" Emits a # comment on the same line. This supports new lines. - commented:"true" Emits the value as commented. - -Note that pointers are automatically assigned the "omitempty" option, as TOML -explicitly does not handle null values (saying instead the label should be -dropped). - -Tree structural types and corresponding marshal types: - - *Tree (*)struct, (*)map[string]interface{} - []*Tree (*)[](*)struct, (*)[](*)map[string]interface{} - []interface{} (as interface{}) (*)[]primitive, (*)[]([]interface{}) - interface{} (*)primitive - -Tree primitive types and corresponding marshal types: - - uint64 uint, uint8-uint64, pointers to same - int64 int, int8-uint64, pointers to same - float64 float32, float64, pointers to same - string string, pointers to same - bool bool, pointers to same - time.Time time.Time{}, pointers to same -*/ -func Marshal(v interface{}) ([]byte, error) { - return NewEncoder(nil).marshal(v) -} - -// Encoder writes TOML values to an output stream. -type Encoder struct { - w io.Writer - encOpts -} - -// NewEncoder returns a new encoder that writes to w. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: w, - encOpts: encOptsDefaults, - } -} - -// Encode writes the TOML encoding of v to the stream. -// -// See the documentation for Marshal for details. -func (e *Encoder) Encode(v interface{}) error { - b, err := e.marshal(v) - if err != nil { - return err - } - if _, err := e.w.Write(b); err != nil { - return err - } - return nil -} - -// QuoteMapKeys sets up the encoder to encode -// maps with string type keys with quoted TOML keys. -// -// This relieves the character limitations on map keys. -func (e *Encoder) QuoteMapKeys(v bool) *Encoder { - e.quoteMapKeys = v - return e -} - -// ArraysWithOneElementPerLine sets up the encoder to encode arrays -// with more than one element on multiple lines instead of one. 
-// -// For example: -// -// A = [1,2,3] -// -// Becomes -// -// A = [ -// 1, -// 2, -// 3 -// ] -func (e *Encoder) ArraysWithOneElementPerLine(v bool) *Encoder { - e.arraysOneElementPerLine = v - return e -} - -func (e *Encoder) marshal(v interface{}) ([]byte, error) { - mtype := reflect.TypeOf(v) - if mtype.Kind() != reflect.Struct { - return []byte{}, errors.New("Only a struct can be marshaled to TOML") - } - sval := reflect.ValueOf(v) - if isCustomMarshaler(mtype) { - return callCustomMarshaler(sval) - } - t, err := e.valueToTree(mtype, sval) - if err != nil { - return []byte{}, err - } - - var buf bytes.Buffer - _, err = t.writeTo(&buf, "", "", 0, e.arraysOneElementPerLine) - - return buf.Bytes(), err -} - -// Convert given marshal struct or map value to toml tree -func (e *Encoder) valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) { - if mtype.Kind() == reflect.Ptr { - return e.valueToTree(mtype.Elem(), mval.Elem()) - } - tval := newTree() - switch mtype.Kind() { - case reflect.Struct: - for i := 0; i < mtype.NumField(); i++ { - mtypef, mvalf := mtype.Field(i), mval.Field(i) - opts := tomlOptions(mtypef) - if opts.include && (!opts.omitempty || !isZero(mvalf)) { - val, err := e.valueToToml(mtypef.Type, mvalf) - if err != nil { - return nil, err - } - tval.SetWithComment(opts.name, opts.comment, opts.commented, val) - } - } - case reflect.Map: - for _, key := range mval.MapKeys() { - mvalf := mval.MapIndex(key) - val, err := e.valueToToml(mtype.Elem(), mvalf) - if err != nil { - return nil, err - } - if e.quoteMapKeys { - keyStr, err := tomlValueStringRepresentation(key.String(), "", e.arraysOneElementPerLine) - if err != nil { - return nil, err - } - tval.SetPath([]string{keyStr}, val) - } else { - tval.Set(key.String(), val) - } - } - } - return tval, nil -} - -// Convert given marshal slice to slice of Toml trees -func (e *Encoder) valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) { - tval := make([]*Tree, mval.Len(), mval.Len()) - for i := 0; i < mval.Len(); i++ { - val, err := e.valueToTree(mtype.Elem(), mval.Index(i)) - if err != nil { - return nil, err - } - tval[i] = val - } - return tval, nil -} - -// Convert given marshal slice to slice of toml values -func (e *Encoder) valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) { - tval := make([]interface{}, mval.Len(), mval.Len()) - for i := 0; i < mval.Len(); i++ { - val, err := e.valueToToml(mtype.Elem(), mval.Index(i)) - if err != nil { - return nil, err - } - tval[i] = val - } - return tval, nil -} - -// Convert given marshal value to toml value -func (e *Encoder) valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) { - if mtype.Kind() == reflect.Ptr { - return e.valueToToml(mtype.Elem(), mval.Elem()) - } - switch { - case isCustomMarshaler(mtype): - return callCustomMarshaler(mval) - case isTree(mtype): - return e.valueToTree(mtype, mval) - case isTreeSlice(mtype): - return e.valueToTreeSlice(mtype, mval) - case isOtherSlice(mtype): - return e.valueToOtherSlice(mtype, mval) - default: - switch mtype.Kind() { - case reflect.Bool: - return mval.Bool(), nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return mval.Int(), nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return mval.Uint(), nil - case reflect.Float32, reflect.Float64: - return mval.Float(), nil - case reflect.String: - return mval.String(), nil - case reflect.Struct: - return 
mval.Interface().(time.Time), nil - default: - return nil, fmt.Errorf("Marshal can't handle %v(%v)", mtype, mtype.Kind()) - } - } -} - -// Unmarshal attempts to unmarshal the Tree into a Go struct pointed by v. -// Neither Unmarshaler interfaces nor UnmarshalTOML functions are supported for -// sub-structs, and only definite types can be unmarshaled. -func (t *Tree) Unmarshal(v interface{}) error { - d := Decoder{tval: t} - return d.unmarshal(v) -} - -// Marshal returns the TOML encoding of Tree. -// See Marshal() documentation for types mapping table. -func (t *Tree) Marshal() ([]byte, error) { - var buf bytes.Buffer - err := NewEncoder(&buf).Encode(t) - return buf.Bytes(), err -} - -// Unmarshal parses the TOML-encoded data and stores the result in the value -// pointed to by v. Behavior is similar to the Go json encoder, except that there -// is no concept of an Unmarshaler interface or UnmarshalTOML function for -// sub-structs, and currently only definite types can be unmarshaled to (i.e. no -// `interface{}`). -// -// The following struct annotations are supported: -// -// toml:"Field" Overrides the field's name to map to. -// -// See Marshal() documentation for types mapping table. -func Unmarshal(data []byte, v interface{}) error { - t, err := LoadReader(bytes.NewReader(data)) - if err != nil { - return err - } - return t.Unmarshal(v) -} - -// Decoder reads and decodes TOML values from an input stream. -type Decoder struct { - r io.Reader - tval *Tree - encOpts -} - -// NewDecoder returns a new decoder that reads from r. -func NewDecoder(r io.Reader) *Decoder { - return &Decoder{ - r: r, - encOpts: encOptsDefaults, - } -} - -// Decode reads a TOML-encoded value from it's input -// and unmarshals it in the value pointed at by v. -// -// See the documentation for Marshal for details. 
-func (d *Decoder) Decode(v interface{}) error { - var err error - d.tval, err = LoadReader(d.r) - if err != nil { - return err - } - return d.unmarshal(v) -} - -func (d *Decoder) unmarshal(v interface{}) error { - mtype := reflect.TypeOf(v) - if mtype.Kind() != reflect.Ptr || mtype.Elem().Kind() != reflect.Struct { - return errors.New("Only a pointer to struct can be unmarshaled from TOML") - } - - sval, err := d.valueFromTree(mtype.Elem(), d.tval) - if err != nil { - return err - } - reflect.ValueOf(v).Elem().Set(sval) - return nil -} - -// Convert toml tree to marshal struct or map, using marshal type -func (d *Decoder) valueFromTree(mtype reflect.Type, tval *Tree) (reflect.Value, error) { - if mtype.Kind() == reflect.Ptr { - return d.unwrapPointer(mtype, tval) - } - var mval reflect.Value - switch mtype.Kind() { - case reflect.Struct: - mval = reflect.New(mtype).Elem() - for i := 0; i < mtype.NumField(); i++ { - mtypef := mtype.Field(i) - opts := tomlOptions(mtypef) - if opts.include { - baseKey := opts.name - keysToTry := []string{baseKey, strings.ToLower(baseKey), strings.ToTitle(baseKey)} - for _, key := range keysToTry { - exists := tval.Has(key) - if !exists { - continue - } - val := tval.Get(key) - mvalf, err := d.valueFromToml(mtypef.Type, val) - if err != nil { - return mval, formatError(err, tval.GetPosition(key)) - } - mval.Field(i).Set(mvalf) - break - } - } - } - case reflect.Map: - mval = reflect.MakeMap(mtype) - for _, key := range tval.Keys() { - // TODO: path splits key - val := tval.GetPath([]string{key}) - mvalf, err := d.valueFromToml(mtype.Elem(), val) - if err != nil { - return mval, formatError(err, tval.GetPosition(key)) - } - mval.SetMapIndex(reflect.ValueOf(key), mvalf) - } - } - return mval, nil -} - -// Convert toml value to marshal struct/map slice, using marshal type -func (d *Decoder) valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) { - mval := reflect.MakeSlice(mtype, len(tval), len(tval)) - for i := 0; i < len(tval); i++ { - val, err := d.valueFromTree(mtype.Elem(), tval[i]) - if err != nil { - return mval, err - } - mval.Index(i).Set(val) - } - return mval, nil -} - -// Convert toml value to marshal primitive slice, using marshal type -func (d *Decoder) valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) { - mval := reflect.MakeSlice(mtype, len(tval), len(tval)) - for i := 0; i < len(tval); i++ { - val, err := d.valueFromToml(mtype.Elem(), tval[i]) - if err != nil { - return mval, err - } - mval.Index(i).Set(val) - } - return mval, nil -} - -// Convert toml value to marshal value, using marshal type -func (d *Decoder) valueFromToml(mtype reflect.Type, tval interface{}) (reflect.Value, error) { - if mtype.Kind() == reflect.Ptr { - return d.unwrapPointer(mtype, tval) - } - - switch tval.(type) { - case *Tree: - if isTree(mtype) { - return d.valueFromTree(mtype, tval.(*Tree)) - } - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a tree", tval, tval) - case []*Tree: - if isTreeSlice(mtype) { - return d.valueFromTreeSlice(mtype, tval.([]*Tree)) - } - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to trees", tval, tval) - case []interface{}: - if isOtherSlice(mtype) { - return d.valueFromOtherSlice(mtype, tval.([]interface{})) - } - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a slice", tval, tval) - default: - switch mtype.Kind() { - case reflect.Bool, reflect.Struct: - val := reflect.ValueOf(tval) - // if this passes for when mtype is 
reflect.Struct, tval is a time.Time - if !val.Type().ConvertibleTo(mtype) { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) - } - - return val.Convert(mtype), nil - case reflect.String: - val := reflect.ValueOf(tval) - // stupidly, int64 is convertible to string. So special case this. - if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Int64 { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) - } - - return val.Convert(mtype), nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - val := reflect.ValueOf(tval) - if !val.Type().ConvertibleTo(mtype) { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) - } - if reflect.Indirect(reflect.New(mtype)).OverflowInt(val.Int()) { - return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) - } - - return val.Convert(mtype), nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - val := reflect.ValueOf(tval) - if !val.Type().ConvertibleTo(mtype) { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) - } - if val.Int() < 0 { - return reflect.ValueOf(nil), fmt.Errorf("%v(%T) is negative so does not fit in %v", tval, tval, mtype.String()) - } - if reflect.Indirect(reflect.New(mtype)).OverflowUint(uint64(val.Int())) { - return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) - } - - return val.Convert(mtype), nil - case reflect.Float32, reflect.Float64: - val := reflect.ValueOf(tval) - if !val.Type().ConvertibleTo(mtype) { - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) - } - if reflect.Indirect(reflect.New(mtype)).OverflowFloat(val.Float()) { - return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) - } - - return val.Convert(mtype), nil - default: - return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v(%v)", tval, tval, mtype, mtype.Kind()) - } - } -} - -func (d *Decoder) unwrapPointer(mtype reflect.Type, tval interface{}) (reflect.Value, error) { - val, err := d.valueFromToml(mtype.Elem(), tval) - if err != nil { - return reflect.ValueOf(nil), err - } - mval := reflect.New(mtype.Elem()) - mval.Elem().Set(val) - return mval, nil -} - -func tomlOptions(vf reflect.StructField) tomlOpts { - tag := vf.Tag.Get("toml") - parse := strings.Split(tag, ",") - var comment string - if c := vf.Tag.Get("comment"); c != "" { - comment = c - } - commented, _ := strconv.ParseBool(vf.Tag.Get("commented")) - result := tomlOpts{name: vf.Name, comment: comment, commented: commented, include: true, omitempty: false} - if parse[0] != "" { - if parse[0] == "-" && len(parse) == 1 { - result.include = false - } else { - result.name = strings.Trim(parse[0], " ") - } - } - if vf.PkgPath != "" { - result.include = false - } - if len(parse) > 1 && strings.Trim(parse[1], " ") == "omitempty" { - result.omitempty = true - } - if vf.Type.Kind() == reflect.Ptr { - result.omitempty = true - } - return result -} - -func isZero(val reflect.Value) bool { - switch val.Type().Kind() { - case reflect.Map: - fallthrough - case reflect.Array: - fallthrough - case reflect.Slice: - return val.Len() == 0 - default: - return reflect.DeepEqual(val.Interface(), reflect.Zero(val.Type()).Interface()) - } -} - -func formatError(err error, pos 
Position) error { - if err.Error()[0] == '(' { // Error already contains position information - return err - } - return fmt.Errorf("%s: %s", pos, err) -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/parser.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/parser.go deleted file mode 100644 index 2d27599a99..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/parser.go +++ /dev/null @@ -1,430 +0,0 @@ -// TOML Parser. - -package toml - -import ( - "errors" - "fmt" - "math" - "reflect" - "regexp" - "strconv" - "strings" - "time" -) - -type tomlParser struct { - flowIdx int - flow []token - tree *Tree - currentTable []string - seenTableKeys []string -} - -type tomlParserStateFn func() tomlParserStateFn - -// Formats and panics an error message based on a token -func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) { - panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...)) -} - -func (p *tomlParser) run() { - for state := p.parseStart; state != nil; { - state = state() - } -} - -func (p *tomlParser) peek() *token { - if p.flowIdx >= len(p.flow) { - return nil - } - return &p.flow[p.flowIdx] -} - -func (p *tomlParser) assume(typ tokenType) { - tok := p.getToken() - if tok == nil { - p.raiseError(tok, "was expecting token %s, but token stream is empty", tok) - } - if tok.typ != typ { - p.raiseError(tok, "was expecting token %s, but got %s instead", typ, tok) - } -} - -func (p *tomlParser) getToken() *token { - tok := p.peek() - if tok == nil { - return nil - } - p.flowIdx++ - return tok -} - -func (p *tomlParser) parseStart() tomlParserStateFn { - tok := p.peek() - - // end of stream, parsing is finished - if tok == nil { - return nil - } - - switch tok.typ { - case tokenDoubleLeftBracket: - return p.parseGroupArray - case tokenLeftBracket: - return p.parseGroup - case tokenKey: - return p.parseAssign - case tokenEOF: - return nil - default: - p.raiseError(tok, "unexpected token") - } - return nil -} - -func (p *tomlParser) parseGroupArray() tomlParserStateFn { - startToken := p.getToken() // discard the [[ - key := p.getToken() - if key.typ != tokenKeyGroupArray { - p.raiseError(key, "unexpected token %s, was expecting a table array key", key) - } - - // get or create table array element at the indicated part in the path - keys, err := parseKey(key.val) - if err != nil { - p.raiseError(key, "invalid table array key: %s", err) - } - p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries - destTree := p.tree.GetPath(keys) - var array []*Tree - if destTree == nil { - array = make([]*Tree, 0) - } else if target, ok := destTree.([]*Tree); ok && target != nil { - array = destTree.([]*Tree) - } else { - p.raiseError(key, "key %s is already assigned and not of type table array", key) - } - p.currentTable = keys - - // add a new tree to the end of the table array - newTree := newTree() - newTree.position = startToken.Position - array = append(array, newTree) - p.tree.SetPath(p.currentTable, array) - - // remove all keys that were children of this table array - prefix := key.val + "." - found := false - for ii := 0; ii < len(p.seenTableKeys); { - tableKey := p.seenTableKeys[ii] - if strings.HasPrefix(tableKey, prefix) { - p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...) 
- } else { - found = (tableKey == key.val) - ii++ - } - } - - // keep this key name from use by other kinds of assignments - if !found { - p.seenTableKeys = append(p.seenTableKeys, key.val) - } - - // move to next parser state - p.assume(tokenDoubleRightBracket) - return p.parseStart -} - -func (p *tomlParser) parseGroup() tomlParserStateFn { - startToken := p.getToken() // discard the [ - key := p.getToken() - if key.typ != tokenKeyGroup { - p.raiseError(key, "unexpected token %s, was expecting a table key", key) - } - for _, item := range p.seenTableKeys { - if item == key.val { - p.raiseError(key, "duplicated tables") - } - } - - p.seenTableKeys = append(p.seenTableKeys, key.val) - keys, err := parseKey(key.val) - if err != nil { - p.raiseError(key, "invalid table array key: %s", err) - } - if err := p.tree.createSubTree(keys, startToken.Position); err != nil { - p.raiseError(key, "%s", err) - } - p.assume(tokenRightBracket) - p.currentTable = keys - return p.parseStart -} - -func (p *tomlParser) parseAssign() tomlParserStateFn { - key := p.getToken() - p.assume(tokenEqual) - - value := p.parseRvalue() - var tableKey []string - if len(p.currentTable) > 0 { - tableKey = p.currentTable - } else { - tableKey = []string{} - } - - // find the table to assign, looking out for arrays of tables - var targetNode *Tree - switch node := p.tree.GetPath(tableKey).(type) { - case []*Tree: - targetNode = node[len(node)-1] - case *Tree: - targetNode = node - default: - p.raiseError(key, "Unknown table type for path: %s", - strings.Join(tableKey, ".")) - } - - // assign value to the found table - keyVals := []string{key.val} - if len(keyVals) != 1 { - p.raiseError(key, "Invalid key") - } - keyVal := keyVals[0] - localKey := []string{keyVal} - finalKey := append(tableKey, keyVal) - if targetNode.GetPath(localKey) != nil { - p.raiseError(key, "The following key was defined twice: %s", - strings.Join(finalKey, ".")) - } - var toInsert interface{} - - switch value.(type) { - case *Tree, []*Tree: - toInsert = value - default: - toInsert = &tomlValue{value: value, position: key.Position} - } - targetNode.values[keyVal] = toInsert - return p.parseStart -} - -var numberUnderscoreInvalidRegexp *regexp.Regexp -var hexNumberUnderscoreInvalidRegexp *regexp.Regexp - -func numberContainsInvalidUnderscore(value string) error { - if numberUnderscoreInvalidRegexp.MatchString(value) { - return errors.New("invalid use of _ in number") - } - return nil -} - -func hexNumberContainsInvalidUnderscore(value string) error { - if hexNumberUnderscoreInvalidRegexp.MatchString(value) { - return errors.New("invalid use of _ in hex number") - } - return nil -} - -func cleanupNumberToken(value string) string { - cleanedVal := strings.Replace(value, "_", "", -1) - return cleanedVal -} - -func (p *tomlParser) parseRvalue() interface{} { - tok := p.getToken() - if tok == nil || tok.typ == tokenEOF { - p.raiseError(tok, "expecting a value") - } - - switch tok.typ { - case tokenString: - return tok.val - case tokenTrue: - return true - case tokenFalse: - return false - case tokenInf: - if tok.val[0] == '-' { - return math.Inf(-1) - } - return math.Inf(1) - case tokenNan: - return math.NaN() - case tokenInteger: - cleanedVal := cleanupNumberToken(tok.val) - var err error - var val int64 - if len(cleanedVal) >= 3 && cleanedVal[0] == '0' { - switch cleanedVal[1] { - case 'x': - err = hexNumberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal[2:], 16, 64) - case 
'o': - err = numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal[2:], 8, 64) - case 'b': - err = numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal[2:], 2, 64) - default: - panic("invalid base") // the lexer should catch this first - } - } else { - err = numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal, 10, 64) - } - if err != nil { - p.raiseError(tok, "%s", err) - } - return val - case tokenFloat: - err := numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - cleanedVal := cleanupNumberToken(tok.val) - val, err := strconv.ParseFloat(cleanedVal, 64) - if err != nil { - p.raiseError(tok, "%s", err) - } - return val - case tokenDate: - val, err := time.ParseInLocation(time.RFC3339Nano, tok.val, time.UTC) - if err != nil { - p.raiseError(tok, "%s", err) - } - return val - case tokenLeftBracket: - return p.parseArray() - case tokenLeftCurlyBrace: - return p.parseInlineTable() - case tokenEqual: - p.raiseError(tok, "cannot have multiple equals for the same key") - case tokenError: - p.raiseError(tok, "%s", tok) - } - - p.raiseError(tok, "never reached") - - return nil -} - -func tokenIsComma(t *token) bool { - return t != nil && t.typ == tokenComma -} - -func (p *tomlParser) parseInlineTable() *Tree { - tree := newTree() - var previous *token -Loop: - for { - follow := p.peek() - if follow == nil || follow.typ == tokenEOF { - p.raiseError(follow, "unterminated inline table") - } - switch follow.typ { - case tokenRightCurlyBrace: - p.getToken() - break Loop - case tokenKey: - if !tokenIsComma(previous) && previous != nil { - p.raiseError(follow, "comma expected between fields in inline table") - } - key := p.getToken() - p.assume(tokenEqual) - value := p.parseRvalue() - tree.Set(key.val, value) - case tokenComma: - if previous == nil { - p.raiseError(follow, "inline table cannot start with a comma") - } - if tokenIsComma(previous) { - p.raiseError(follow, "need field between two commas in inline table") - } - p.getToken() - default: - p.raiseError(follow, "unexpected token type in inline table: %s", follow.String()) - } - previous = follow - } - if tokenIsComma(previous) { - p.raiseError(previous, "trailing comma at the end of inline table") - } - return tree -} - -func (p *tomlParser) parseArray() interface{} { - var array []interface{} - arrayType := reflect.TypeOf(nil) - for { - follow := p.peek() - if follow == nil || follow.typ == tokenEOF { - p.raiseError(follow, "unterminated array") - } - if follow.typ == tokenRightBracket { - p.getToken() - break - } - val := p.parseRvalue() - if arrayType == nil { - arrayType = reflect.TypeOf(val) - } - if reflect.TypeOf(val) != arrayType { - p.raiseError(follow, "mixed types in array") - } - array = append(array, val) - follow = p.peek() - if follow == nil || follow.typ == tokenEOF { - p.raiseError(follow, "unterminated array") - } - if follow.typ != tokenRightBracket && follow.typ != tokenComma { - p.raiseError(follow, "missing comma") - } - if follow.typ == tokenComma { - p.getToken() - } - } - // An array of Trees is actually an array of inline - // tables, which is a shorthand for a table array. If the - // array was not converted from []interface{} to []*Tree, - // the two notations would not be equivalent. 
- if arrayType == reflect.TypeOf(newTree()) { - tomlArray := make([]*Tree, len(array)) - for i, v := range array { - tomlArray[i] = v.(*Tree) - } - return tomlArray - } - return array -} - -func parseToml(flow []token) *Tree { - result := newTree() - result.position = Position{1, 1} - parser := &tomlParser{ - flowIdx: 0, - flow: flow, - tree: result, - currentTable: make([]string, 0), - seenTableKeys: make([]string, 0), - } - parser.run() - return result -} - -func init() { - numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d])|_$|^_`) - hexNumberUnderscoreInvalidRegexp = regexp.MustCompile(`(^0x_)|([^\da-f]_|_[^\da-f])|_$|^_`) -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/position.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/position.go deleted file mode 100644 index c17bff87ba..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/position.go +++ /dev/null @@ -1,29 +0,0 @@ -// Position support for go-toml - -package toml - -import ( - "fmt" -) - -// Position of a document element within a TOML document. -// -// Line and Col are both 1-indexed positions for the element's line number and -// column number, respectively. Values of zero or less will cause Invalid(), -// to return true. -type Position struct { - Line int // line within the document - Col int // column within the line -} - -// String representation of the position. -// Displays 1-indexed line and column numbers. -func (p Position) String() string { - return fmt.Sprintf("(%d, %d)", p.Line, p.Col) -} - -// Invalid returns whether or not the position is valid (i.e. with negative or -// null values) -func (p Position) Invalid() bool { - return p.Line <= 0 || p.Col <= 0 -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/token.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/token.go deleted file mode 100644 index 1a90813466..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/token.go +++ /dev/null @@ -1,144 +0,0 @@ -package toml - -import ( - "fmt" - "strconv" - "unicode" -) - -// Define tokens -type tokenType int - -const ( - eof = -(iota + 1) -) - -const ( - tokenError tokenType = iota - tokenEOF - tokenComment - tokenKey - tokenString - tokenInteger - tokenTrue - tokenFalse - tokenFloat - tokenInf - tokenNan - tokenEqual - tokenLeftBracket - tokenRightBracket - tokenLeftCurlyBrace - tokenRightCurlyBrace - tokenLeftParen - tokenRightParen - tokenDoubleLeftBracket - tokenDoubleRightBracket - tokenDate - tokenKeyGroup - tokenKeyGroupArray - tokenComma - tokenColon - tokenDollar - tokenStar - tokenQuestion - tokenDot - tokenDotDot - tokenEOL -) - -var tokenTypeNames = []string{ - "Error", - "EOF", - "Comment", - "Key", - "String", - "Integer", - "True", - "False", - "Float", - "Inf", - "NaN", - "=", - "[", - "]", - "{", - "}", - "(", - ")", - "]]", - "[[", - "Date", - "KeyGroup", - "KeyGroupArray", - ",", - ":", - "$", - "*", - "?", - ".", - "..", - "EOL", -} - -type token struct { - Position - typ tokenType - val string -} - -func (tt tokenType) String() string { - idx := int(tt) - if idx < len(tokenTypeNames) { - return tokenTypeNames[idx] - } - return "Unknown" -} - -func (t token) Int() int { - if result, err := strconv.Atoi(t.val); err != nil { - panic(err) - } else { - return result - } -} - -func (t token) String() string { - switch t.typ { - case tokenEOF: - return "EOF" - case tokenError: - return t.val - } - - return fmt.Sprintf("%q", t.val) -} - -func isSpace(r rune) bool { - return r == ' ' || r == '\t' 
-} - -func isAlphanumeric(r rune) bool { - return unicode.IsLetter(r) || r == '_' -} - -func isKeyChar(r rune) bool { - // Keys start with the first character that isn't whitespace or [ and end - // with the last non-whitespace character before the equals sign. Keys - // cannot contain a # character." - return !(r == '\r' || r == '\n' || r == eof || r == '=') -} - -func isKeyStartChar(r rune) bool { - return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[') -} - -func isDigit(r rune) bool { - return unicode.IsNumber(r) -} - -func isHexDigit(r rune) bool { - return isDigit(r) || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/toml.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/toml.go deleted file mode 100644 index 05493a444b..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/toml.go +++ /dev/null @@ -1,309 +0,0 @@ -package toml - -import ( - "errors" - "fmt" - "io" - "io/ioutil" - "os" - "runtime" - "strings" -) - -type tomlValue struct { - value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list - comment string - commented bool - position Position -} - -// Tree is the result of the parsing of a TOML file. -type Tree struct { - values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree - comment string - commented bool - position Position -} - -func newTree() *Tree { - return &Tree{ - values: make(map[string]interface{}), - position: Position{}, - } -} - -// TreeFromMap initializes a new Tree object using the given map. -func TreeFromMap(m map[string]interface{}) (*Tree, error) { - result, err := toTree(m) - if err != nil { - return nil, err - } - return result.(*Tree), nil -} - -// Position returns the position of the tree. -func (t *Tree) Position() Position { - return t.position -} - -// Has returns a boolean indicating if the given key exists. -func (t *Tree) Has(key string) bool { - if key == "" { - return false - } - return t.HasPath(strings.Split(key, ".")) -} - -// HasPath returns true if the given path of keys exists, false otherwise. -func (t *Tree) HasPath(keys []string) bool { - return t.GetPath(keys) != nil -} - -// Keys returns the keys of the toplevel tree (does not recurse). -func (t *Tree) Keys() []string { - keys := make([]string, len(t.values)) - i := 0 - for k := range t.values { - keys[i] = k - i++ - } - return keys -} - -// Get the value at key in the Tree. -// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings. -// If you need to retrieve non-bare keys, use GetPath. -// Returns nil if the path does not exist in the tree. -// If keys is of length zero, the current tree is returned. -func (t *Tree) Get(key string) interface{} { - if key == "" { - return t - } - return t.GetPath(strings.Split(key, ".")) -} - -// GetPath returns the element in the tree indicated by 'keys'. -// If keys is of length zero, the current tree is returned. 
-func (t *Tree) GetPath(keys []string) interface{} { - if len(keys) == 0 { - return t - } - subtree := t - for _, intermediateKey := range keys[:len(keys)-1] { - value, exists := subtree.values[intermediateKey] - if !exists { - return nil - } - switch node := value.(type) { - case *Tree: - subtree = node - case []*Tree: - // go to most recent element - if len(node) == 0 { - return nil - } - subtree = node[len(node)-1] - default: - return nil // cannot navigate through other node types - } - } - // branch based on final node type - switch node := subtree.values[keys[len(keys)-1]].(type) { - case *tomlValue: - return node.value - default: - return node - } -} - -// GetPosition returns the position of the given key. -func (t *Tree) GetPosition(key string) Position { - if key == "" { - return t.position - } - return t.GetPositionPath(strings.Split(key, ".")) -} - -// GetPositionPath returns the element in the tree indicated by 'keys'. -// If keys is of length zero, the current tree is returned. -func (t *Tree) GetPositionPath(keys []string) Position { - if len(keys) == 0 { - return t.position - } - subtree := t - for _, intermediateKey := range keys[:len(keys)-1] { - value, exists := subtree.values[intermediateKey] - if !exists { - return Position{0, 0} - } - switch node := value.(type) { - case *Tree: - subtree = node - case []*Tree: - // go to most recent element - if len(node) == 0 { - return Position{0, 0} - } - subtree = node[len(node)-1] - default: - return Position{0, 0} - } - } - // branch based on final node type - switch node := subtree.values[keys[len(keys)-1]].(type) { - case *tomlValue: - return node.position - case *Tree: - return node.position - case []*Tree: - // go to most recent element - if len(node) == 0 { - return Position{0, 0} - } - return node[len(node)-1].position - default: - return Position{0, 0} - } -} - -// GetDefault works like Get but with a default value -func (t *Tree) GetDefault(key string, def interface{}) interface{} { - val := t.Get(key) - if val == nil { - return def - } - return val -} - -// Set an element in the tree. -// Key is a dot-separated path (e.g. a.b.c). -// Creates all necessary intermediate trees, if needed. -func (t *Tree) Set(key string, value interface{}) { - t.SetWithComment(key, "", false, value) -} - -// SetWithComment is the same as Set, but allows you to provide comment -// information to the key, that will be reused by Marshal(). -func (t *Tree) SetWithComment(key string, comment string, commented bool, value interface{}) { - t.SetPathWithComment(strings.Split(key, "."), comment, commented, value) -} - -// SetPath sets an element in the tree. -// Keys is an array of path elements (e.g. {"a","b","c"}). -// Creates all necessary intermediate trees, if needed. -func (t *Tree) SetPath(keys []string, value interface{}) { - t.SetPathWithComment(keys, "", false, value) -} - -// SetPathWithComment is the same as SetPath, but allows you to provide comment -// information to the key, that will be reused by Marshal(). 
-func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) { - subtree := t - for _, intermediateKey := range keys[:len(keys)-1] { - nextTree, exists := subtree.values[intermediateKey] - if !exists { - nextTree = newTree() - subtree.values[intermediateKey] = nextTree // add new element here - } - switch node := nextTree.(type) { - case *Tree: - subtree = node - case []*Tree: - // go to most recent element - if len(node) == 0 { - // create element if it does not exist - subtree.values[intermediateKey] = append(node, newTree()) - } - subtree = node[len(node)-1] - } - } - - var toInsert interface{} - - switch value.(type) { - case *Tree: - tt := value.(*Tree) - tt.comment = comment - toInsert = value - case []*Tree: - toInsert = value - case *tomlValue: - tt := value.(*tomlValue) - tt.comment = comment - toInsert = tt - default: - toInsert = &tomlValue{value: value, comment: comment, commented: commented} - } - - subtree.values[keys[len(keys)-1]] = toInsert -} - -// createSubTree takes a tree and a key and create the necessary intermediate -// subtrees to create a subtree at that point. In-place. -// -// e.g. passing a.b.c will create (assuming tree is empty) tree[a], tree[a][b] -// and tree[a][b][c] -// -// Returns nil on success, error object on failure -func (t *Tree) createSubTree(keys []string, pos Position) error { - subtree := t - for _, intermediateKey := range keys { - nextTree, exists := subtree.values[intermediateKey] - if !exists { - tree := newTree() - tree.position = pos - subtree.values[intermediateKey] = tree - nextTree = tree - } - - switch node := nextTree.(type) { - case []*Tree: - subtree = node[len(node)-1] - case *Tree: - subtree = node - default: - return fmt.Errorf("unknown type for path %s (%s): %T (%#v)", - strings.Join(keys, "."), intermediateKey, nextTree, nextTree) - } - } - return nil -} - -// LoadBytes creates a Tree from a []byte. -func LoadBytes(b []byte) (tree *Tree, err error) { - defer func() { - if r := recover(); r != nil { - if _, ok := r.(runtime.Error); ok { - panic(r) - } - err = errors.New(r.(string)) - } - }() - tree = parseToml(lexToml(b)) - return -} - -// LoadReader creates a Tree from any io.Reader. -func LoadReader(reader io.Reader) (tree *Tree, err error) { - inputBytes, err := ioutil.ReadAll(reader) - if err != nil { - return - } - tree, err = LoadBytes(inputBytes) - return -} - -// Load creates a Tree from a string. -func Load(content string) (tree *Tree, err error) { - return LoadBytes([]byte(content)) -} - -// LoadFile creates a Tree from a file. 
-func LoadFile(path string) (tree *Tree, err error) { - file, err := os.Open(path) - if err != nil { - return nil, err - } - defer file.Close() - return LoadReader(file) -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_create.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_create.go deleted file mode 100644 index 79610e9b34..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_create.go +++ /dev/null @@ -1,142 +0,0 @@ -package toml - -import ( - "fmt" - "reflect" - "time" -) - -var kindToType = [reflect.String + 1]reflect.Type{ - reflect.Bool: reflect.TypeOf(true), - reflect.String: reflect.TypeOf(""), - reflect.Float32: reflect.TypeOf(float64(1)), - reflect.Float64: reflect.TypeOf(float64(1)), - reflect.Int: reflect.TypeOf(int64(1)), - reflect.Int8: reflect.TypeOf(int64(1)), - reflect.Int16: reflect.TypeOf(int64(1)), - reflect.Int32: reflect.TypeOf(int64(1)), - reflect.Int64: reflect.TypeOf(int64(1)), - reflect.Uint: reflect.TypeOf(uint64(1)), - reflect.Uint8: reflect.TypeOf(uint64(1)), - reflect.Uint16: reflect.TypeOf(uint64(1)), - reflect.Uint32: reflect.TypeOf(uint64(1)), - reflect.Uint64: reflect.TypeOf(uint64(1)), -} - -// typeFor returns a reflect.Type for a reflect.Kind, or nil if none is found. -// supported values: -// string, bool, int64, uint64, float64, time.Time, int, int8, int16, int32, uint, uint8, uint16, uint32, float32 -func typeFor(k reflect.Kind) reflect.Type { - if k > 0 && int(k) < len(kindToType) { - return kindToType[k] - } - return nil -} - -func simpleValueCoercion(object interface{}) (interface{}, error) { - switch original := object.(type) { - case string, bool, int64, uint64, float64, time.Time: - return original, nil - case int: - return int64(original), nil - case int8: - return int64(original), nil - case int16: - return int64(original), nil - case int32: - return int64(original), nil - case uint: - return uint64(original), nil - case uint8: - return uint64(original), nil - case uint16: - return uint64(original), nil - case uint32: - return uint64(original), nil - case float32: - return float64(original), nil - case fmt.Stringer: - return original.String(), nil - default: - return nil, fmt.Errorf("cannot convert type %T to Tree", object) - } -} - -func sliceToTree(object interface{}) (interface{}, error) { - // arrays are a bit tricky, since they can represent either a - // collection of simple values, which is represented by one - // *tomlValue, or an array of tables, which is represented by an - // array of *Tree. 
- - // holding the assumption that this function is called from toTree only when value.Kind() is Array or Slice - value := reflect.ValueOf(object) - insideType := value.Type().Elem() - length := value.Len() - if length > 0 { - insideType = reflect.ValueOf(value.Index(0).Interface()).Type() - } - if insideType.Kind() == reflect.Map { - // this is considered as an array of tables - tablesArray := make([]*Tree, 0, length) - for i := 0; i < length; i++ { - table := value.Index(i) - tree, err := toTree(table.Interface()) - if err != nil { - return nil, err - } - tablesArray = append(tablesArray, tree.(*Tree)) - } - return tablesArray, nil - } - - sliceType := typeFor(insideType.Kind()) - if sliceType == nil { - sliceType = insideType - } - - arrayValue := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length) - - for i := 0; i < length; i++ { - val := value.Index(i).Interface() - simpleValue, err := simpleValueCoercion(val) - if err != nil { - return nil, err - } - arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue)) - } - return &tomlValue{value: arrayValue.Interface(), position: Position{}}, nil -} - -func toTree(object interface{}) (interface{}, error) { - value := reflect.ValueOf(object) - - if value.Kind() == reflect.Map { - values := map[string]interface{}{} - keys := value.MapKeys() - for _, key := range keys { - if key.Kind() != reflect.String { - if _, ok := key.Interface().(string); !ok { - return nil, fmt.Errorf("map key needs to be a string, not %T (%v)", key.Interface(), key.Kind()) - } - } - - v := value.MapIndex(key) - newValue, err := toTree(v.Interface()) - if err != nil { - return nil, err - } - values[key.String()] = newValue - } - return &Tree{values: values, position: Position{}}, nil - } - - if value.Kind() == reflect.Array || value.Kind() == reflect.Slice { - return sliceToTree(object) - } - - simpleValue, err := simpleValueCoercion(object) - if err != nil { - return nil, err - } - return &tomlValue{value: simpleValue, position: Position{}}, nil -} diff --git a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_write.go b/vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_write.go deleted file mode 100644 index d322a9764f..0000000000 --- a/vendor/repo-infra/vendor/github.com/pelletier/go-toml/tomltree_write.go +++ /dev/null @@ -1,289 +0,0 @@ -package toml - -import ( - "bytes" - "fmt" - "io" - "math" - "reflect" - "sort" - "strconv" - "strings" - "time" -) - -// encodes a string to a TOML-compliant string value -func encodeTomlString(value string) string { - var b bytes.Buffer - - for _, rr := range value { - switch rr { - case '\b': - b.WriteString(`\b`) - case '\t': - b.WriteString(`\t`) - case '\n': - b.WriteString(`\n`) - case '\f': - b.WriteString(`\f`) - case '\r': - b.WriteString(`\r`) - case '"': - b.WriteString(`\"`) - case '\\': - b.WriteString(`\\`) - default: - intRr := uint16(rr) - if intRr < 0x001F { - b.WriteString(fmt.Sprintf("\\u%0.4X", intRr)) - } else { - b.WriteRune(rr) - } - } - } - return b.String() -} - -func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElementPerLine bool) (string, error) { - switch value := v.(type) { - case uint64: - return strconv.FormatUint(value, 10), nil - case int64: - return strconv.FormatInt(value, 10), nil - case float64: - // Ensure a round float does contain a decimal point. Otherwise feeding - // the output back to the parser would convert to an integer. 
- if math.Trunc(value) == value { - return strings.ToLower(strconv.FormatFloat(value, 'f', 1, 32)), nil - } - return strings.ToLower(strconv.FormatFloat(value, 'f', -1, 32)), nil - case string: - return "\"" + encodeTomlString(value) + "\"", nil - case []byte: - b, _ := v.([]byte) - return tomlValueStringRepresentation(string(b), indent, arraysOneElementPerLine) - case bool: - if value { - return "true", nil - } - return "false", nil - case time.Time: - return value.Format(time.RFC3339), nil - case nil: - return "", nil - } - - rv := reflect.ValueOf(v) - - if rv.Kind() == reflect.Slice { - var values []string - for i := 0; i < rv.Len(); i++ { - item := rv.Index(i).Interface() - itemRepr, err := tomlValueStringRepresentation(item, indent, arraysOneElementPerLine) - if err != nil { - return "", err - } - values = append(values, itemRepr) - } - if arraysOneElementPerLine && len(values) > 1 { - stringBuffer := bytes.Buffer{} - valueIndent := indent + ` ` // TODO: move that to a shared encoder state - - stringBuffer.WriteString("[\n") - - for i, value := range values { - stringBuffer.WriteString(valueIndent) - stringBuffer.WriteString(value) - if i != len(values)-1 { - stringBuffer.WriteString(`,`) - } - stringBuffer.WriteString("\n") - } - - stringBuffer.WriteString(indent + "]") - - return stringBuffer.String(), nil - } - return "[" + strings.Join(values, ",") + "]", nil - } - return "", fmt.Errorf("unsupported value type %T: %v", v, v) -} - -func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) { - simpleValuesKeys := make([]string, 0) - complexValuesKeys := make([]string, 0) - - for k := range t.values { - v := t.values[k] - switch v.(type) { - case *Tree, []*Tree: - complexValuesKeys = append(complexValuesKeys, k) - default: - simpleValuesKeys = append(simpleValuesKeys, k) - } - } - - sort.Strings(simpleValuesKeys) - sort.Strings(complexValuesKeys) - - for _, k := range simpleValuesKeys { - v, ok := t.values[k].(*tomlValue) - if !ok { - return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k]) - } - - repr, err := tomlValueStringRepresentation(v.value, indent, arraysOneElementPerLine) - if err != nil { - return bytesCount, err - } - - if v.comment != "" { - comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1) - start := "# " - if strings.HasPrefix(comment, "#") { - start = "" - } - writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n") - bytesCount += int64(writtenBytesCountComment) - if errc != nil { - return bytesCount, errc - } - } - - var commented string - if v.commented { - commented = "# " - } - writtenBytesCount, err := writeStrings(w, indent, commented, k, " = ", repr, "\n") - bytesCount += int64(writtenBytesCount) - if err != nil { - return bytesCount, err - } - } - - for _, k := range complexValuesKeys { - v := t.values[k] - - combinedKey := k - if keyspace != "" { - combinedKey = keyspace + "." 
+ combinedKey - } - var commented string - if t.commented { - commented = "# " - } - - switch node := v.(type) { - // node has to be of those two types given how keys are sorted above - case *Tree: - tv, ok := t.values[k].(*Tree) - if !ok { - return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k]) - } - if tv.comment != "" { - comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1) - start := "# " - if strings.HasPrefix(comment, "#") { - start = "" - } - writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment) - bytesCount += int64(writtenBytesCountComment) - if errc != nil { - return bytesCount, errc - } - } - writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n") - bytesCount += int64(writtenBytesCount) - if err != nil { - return bytesCount, err - } - bytesCount, err = node.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine) - if err != nil { - return bytesCount, err - } - case []*Tree: - for _, subTree := range node { - writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n") - bytesCount += int64(writtenBytesCount) - if err != nil { - return bytesCount, err - } - - bytesCount, err = subTree.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine) - if err != nil { - return bytesCount, err - } - } - } - } - - return bytesCount, nil -} - -func writeStrings(w io.Writer, s ...string) (int, error) { - var n int - for i := range s { - b, err := io.WriteString(w, s[i]) - n += b - if err != nil { - return n, err - } - } - return n, nil -} - -// WriteTo encode the Tree as Toml and writes it to the writer w. -// Returns the number of bytes written in case of success, or an error if anything happened. -func (t *Tree) WriteTo(w io.Writer) (int64, error) { - return t.writeTo(w, "", "", 0, false) -} - -// ToTomlString generates a human-readable representation of the current tree. -// Output spans multiple lines, and is suitable for ingest by a TOML parser. -// If the conversion cannot be performed, ToString returns a non-nil error. -func (t *Tree) ToTomlString() (string, error) { - var buf bytes.Buffer - _, err := t.WriteTo(&buf) - if err != nil { - return "", err - } - return buf.String(), nil -} - -// String generates a human-readable representation of the current tree. -// Alias of ToString. Present to implement the fmt.Stringer interface. -func (t *Tree) String() string { - result, _ := t.ToTomlString() - return result -} - -// ToMap recursively generates a representation of the tree using Go built-in structures. 
-// The following types are used: -// -// * bool -// * float64 -// * int64 -// * string -// * uint64 -// * time.Time -// * map[string]interface{} (where interface{} is any of this list) -// * []interface{} (where interface{} is any of this list) -func (t *Tree) ToMap() map[string]interface{} { - result := map[string]interface{}{} - - for k, v := range t.values { - switch node := v.(type) { - case []*Tree: - var array []interface{} - for _, item := range node { - array = append(array, item.ToMap()) - } - result[k] = array - case *Tree: - result[k] = node.ToMap() - case *tomlValue: - result[k] = node.value - } - } - return result -} diff --git a/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/LICENSE b/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/LICENSE deleted file mode 100644 index c67dad612a..0000000000 --- a/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013, Patrick Mezard -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution. - The names of its contributors may not be used to endorse or promote -products derived from this software without specific prior written -permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS -IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/BUILD.bazel b/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/BUILD.bazel deleted file mode 100644 index eaa2f3824f..0000000000 --- a/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/BUILD.bazel +++ /dev/null @@ -1,9 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["difflib.go"], - importmap = "k8s.io/repo-infra/vendor/github.com/pmezard/go-difflib/difflib", - importpath = "github.com/pmezard/go-difflib/difflib", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/difflib.go b/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/difflib.go deleted file mode 100644 index 003e99fadb..0000000000 --- a/vendor/repo-infra/vendor/github.com/pmezard/go-difflib/difflib/difflib.go +++ /dev/null @@ -1,772 +0,0 @@ -// Package difflib is a partial port of Python difflib module. -// -// It provides tools to compare sequences of strings and generate textual diffs. 
-// -// The following class and functions have been ported: -// -// - SequenceMatcher -// -// - unified_diff -// -// - context_diff -// -// Getting unified diffs was the main goal of the port. Keep in mind this code -// is mostly suitable to output text differences in a human friendly way, there -// are no guarantees generated diffs are consumable by patch(1). -package difflib - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" -) - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func calculateRatio(matches, length int) float64 { - if length > 0 { - return 2.0 * float64(matches) / float64(length) - } - return 1.0 -} - -type Match struct { - A int - B int - Size int -} - -type OpCode struct { - Tag byte - I1 int - I2 int - J1 int - J2 int -} - -// SequenceMatcher compares sequence of strings. The basic -// algorithm predates, and is a little fancier than, an algorithm -// published in the late 1980's by Ratcliff and Obershelp under the -// hyperbolic name "gestalt pattern matching". The basic idea is to find -// the longest contiguous matching subsequence that contains no "junk" -// elements (R-O doesn't address junk). The same idea is then applied -// recursively to the pieces of the sequences to the left and to the right -// of the matching subsequence. This does not yield minimal edit -// sequences, but does tend to yield matches that "look right" to people. -// -// SequenceMatcher tries to compute a "human-friendly diff" between two -// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the -// longest *contiguous* & junk-free matching subsequence. That's what -// catches peoples' eyes. The Windows(tm) windiff has another interesting -// notion, pairing up elements that appear uniquely in each sequence. -// That, and the method here, appear to yield more intuitive difference -// reports than does diff. This method appears to be the least vulnerable -// to synching up on blocks of "junk lines", though (like blank lines in -// ordinary text files, or maybe "
<P>
" lines in HTML files). That may be -// because this is the only method of the 3 that has a *concept* of -// "junk" . -// -// Timing: Basic R-O is cubic time worst case and quadratic time expected -// case. SequenceMatcher is quadratic time for the worst case and has -// expected-case behavior dependent in a complicated way on how many -// elements the sequences have in common; best case time is linear. -type SequenceMatcher struct { - a []string - b []string - b2j map[string][]int - IsJunk func(string) bool - autoJunk bool - bJunk map[string]struct{} - matchingBlocks []Match - fullBCount map[string]int - bPopular map[string]struct{} - opCodes []OpCode -} - -func NewMatcher(a, b []string) *SequenceMatcher { - m := SequenceMatcher{autoJunk: true} - m.SetSeqs(a, b) - return &m -} - -func NewMatcherWithJunk(a, b []string, autoJunk bool, - isJunk func(string) bool) *SequenceMatcher { - - m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} - m.SetSeqs(a, b) - return &m -} - -// Set two sequences to be compared. -func (m *SequenceMatcher) SetSeqs(a, b []string) { - m.SetSeq1(a) - m.SetSeq2(b) -} - -// Set the first sequence to be compared. The second sequence to be compared is -// not changed. -// -// SequenceMatcher computes and caches detailed information about the second -// sequence, so if you want to compare one sequence S against many sequences, -// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other -// sequences. -// -// See also SetSeqs() and SetSeq2(). -func (m *SequenceMatcher) SetSeq1(a []string) { - if &a == &m.a { - return - } - m.a = a - m.matchingBlocks = nil - m.opCodes = nil -} - -// Set the second sequence to be compared. The first sequence to be compared is -// not changed. -func (m *SequenceMatcher) SetSeq2(b []string) { - if &b == &m.b { - return - } - m.b = b - m.matchingBlocks = nil - m.opCodes = nil - m.fullBCount = nil - m.chainB() -} - -func (m *SequenceMatcher) chainB() { - // Populate line -> index mapping - b2j := map[string][]int{} - for i, s := range m.b { - indices := b2j[s] - indices = append(indices, i) - b2j[s] = indices - } - - // Purge junk elements - m.bJunk = map[string]struct{}{} - if m.IsJunk != nil { - junk := m.bJunk - for s, _ := range b2j { - if m.IsJunk(s) { - junk[s] = struct{}{} - } - } - for s, _ := range junk { - delete(b2j, s) - } - } - - // Purge remaining popular elements - popular := map[string]struct{}{} - n := len(m.b) - if m.autoJunk && n >= 200 { - ntest := n/100 + 1 - for s, indices := range b2j { - if len(indices) > ntest { - popular[s] = struct{}{} - } - } - for s, _ := range popular { - delete(b2j, s) - } - } - m.bPopular = popular - m.b2j = b2j -} - -func (m *SequenceMatcher) isBJunk(s string) bool { - _, ok := m.bJunk[s] - return ok -} - -// Find longest matching block in a[alo:ahi] and b[blo:bhi]. -// -// If IsJunk is not defined: -// -// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where -// alo <= i <= i+k <= ahi -// blo <= j <= j+k <= bhi -// and for all (i',j',k') meeting those conditions, -// k >= k' -// i <= i' -// and if i == i', j <= j' -// -// In other words, of all maximal matching blocks, return one that -// starts earliest in a, and of all those maximal matching blocks that -// start earliest in a, return the one that starts earliest in b. -// -// If IsJunk is defined, first the longest matching block is -// determined as above, but with the additional restriction that no -// junk element appears in the block. 
Then that block is extended as -// far as possible by matching (only) junk elements on both sides. So -// the resulting block never matches on junk except as identical junk -// happens to be adjacent to an "interesting" match. -// -// If no blocks match, return (alo, blo, 0). -func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { - // CAUTION: stripping common prefix or suffix would be incorrect. - // E.g., - // ab - // acab - // Longest matching block is "ab", but if common prefix is - // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so - // strip, so ends up claiming that ab is changed to acab by - // inserting "ca" in the middle. That's minimal but unintuitive: - // "it's obvious" that someone inserted "ac" at the front. - // Windiff ends up at the same place as diff, but by pairing up - // the unique 'b's and then matching the first two 'a's. - besti, bestj, bestsize := alo, blo, 0 - - // find longest junk-free match - // during an iteration of the loop, j2len[j] = length of longest - // junk-free match ending with a[i-1] and b[j] - j2len := map[int]int{} - for i := alo; i != ahi; i++ { - // look at all instances of a[i] in b; note that because - // b2j has no junk keys, the loop is skipped if a[i] is junk - newj2len := map[int]int{} - for _, j := range m.b2j[m.a[i]] { - // a[i] matches b[j] - if j < blo { - continue - } - if j >= bhi { - break - } - k := j2len[j-1] + 1 - newj2len[j] = k - if k > bestsize { - besti, bestj, bestsize = i-k+1, j-k+1, k - } - } - j2len = newj2len - } - - // Extend the best by non-junk elements on each end. In particular, - // "popular" non-junk elements aren't in b2j, which greatly speeds - // the inner loop above, but also means "the best" match so far - // doesn't contain any junk *or* popular non-junk elements. - for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - !m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - // Now that we have a wholly interesting match (albeit possibly - // empty!), we may as well suck up the matching junk on each - // side of it too. Can't think of a good reason not to, and it - // saves post-processing the (possibly considerable) expense of - // figuring out what to do with it. In the case of an empty - // interesting match, this is clearly the right thing to do, - // because no other kind of match is possible in the regions. - for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - return Match{A: besti, B: bestj, Size: bestsize} -} - -// Return list of triples describing matching subsequences. -// -// Each triple is of the form (i, j, n), and means that -// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in -// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are -// adjacent triples in the list, and the second is not the last triple in the -// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe -// adjacent equal blocks. -// -// The last triple is a dummy, (len(a), len(b), 0), and is the only -// triple with n==0. 
-func (m *SequenceMatcher) GetMatchingBlocks() []Match { - if m.matchingBlocks != nil { - return m.matchingBlocks - } - - var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match - matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { - match := m.findLongestMatch(alo, ahi, blo, bhi) - i, j, k := match.A, match.B, match.Size - if match.Size > 0 { - if alo < i && blo < j { - matched = matchBlocks(alo, i, blo, j, matched) - } - matched = append(matched, match) - if i+k < ahi && j+k < bhi { - matched = matchBlocks(i+k, ahi, j+k, bhi, matched) - } - } - return matched - } - matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) - - // It's possible that we have adjacent equal blocks in the - // matching_blocks list now. - nonAdjacent := []Match{} - i1, j1, k1 := 0, 0, 0 - for _, b := range matched { - // Is this block adjacent to i1, j1, k1? - i2, j2, k2 := b.A, b.B, b.Size - if i1+k1 == i2 && j1+k1 == j2 { - // Yes, so collapse them -- this just increases the length of - // the first block by the length of the second, and the first - // block so lengthened remains the block to compare against. - k1 += k2 - } else { - // Not adjacent. Remember the first block (k1==0 means it's - // the dummy we started with), and make the second block the - // new block to compare against. - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - i1, j1, k1 = i2, j2, k2 - } - } - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - - nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) - m.matchingBlocks = nonAdjacent - return m.matchingBlocks -} - -// Return list of 5-tuples describing how to turn a into b. -// -// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple -// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the -// tuple preceding it, and likewise for j1 == the previous j2. -// -// The tags are characters, with these meanings: -// -// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] -// -// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. -// -// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. -// -// 'e' (equal): a[i1:i2] == b[j1:j2] -func (m *SequenceMatcher) GetOpCodes() []OpCode { - if m.opCodes != nil { - return m.opCodes - } - i, j := 0, 0 - matching := m.GetMatchingBlocks() - opCodes := make([]OpCode, 0, len(matching)) - for _, m := range matching { - // invariant: we've pumped out correct diffs to change - // a[:i] into b[:j], and the next matching block is - // a[ai:ai+size] == b[bj:bj+size]. So we need to pump - // out a diff to change a[i:ai] into b[j:bj], pump out - // the matching block, and move (i,j) beyond the match - ai, bj, size := m.A, m.B, m.Size - tag := byte(0) - if i < ai && j < bj { - tag = 'r' - } else if i < ai { - tag = 'd' - } else if j < bj { - tag = 'i' - } - if tag > 0 { - opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) - } - i, j = ai+size, bj+size - // the list of matching blocks is terminated by a - // sentinel with size 0 - if size > 0 { - opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) - } - } - m.opCodes = opCodes - return m.opCodes -} - -// Isolate change clusters by eliminating ranges with no changes. -// -// Return a generator of groups with up to n lines of context. -// Each group is in the same format as returned by GetOpCodes(). 
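A minimal sketch of how the SequenceMatcher API above is typically driven, assuming the upstream github.com/pmezard/go-difflib import path rather than this vendored copy:

package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	a := []string{"one\n", "two\n", "three\n"}
	b := []string{"one\n", "two\n", "four\n"}

	m := difflib.NewMatcher(a, b)
	for _, op := range m.GetOpCodes() {
		// Tag is one of 'e' (equal), 'r' (replace), 'd' (delete), 'i' (insert).
		fmt.Printf("%c a[%d:%d] b[%d:%d]\n", op.Tag, op.I1, op.I2, op.J1, op.J2)
	}
	fmt.Printf("similarity: %.2f\n", m.Ratio())
}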
-func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { - if n < 0 { - n = 3 - } - codes := m.GetOpCodes() - if len(codes) == 0 { - codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} - } - // Fixup leading and trailing groups if they show no changes. - if codes[0].Tag == 'e' { - c := codes[0] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} - } - if codes[len(codes)-1].Tag == 'e' { - c := codes[len(codes)-1] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} - } - nn := n + n - groups := [][]OpCode{} - group := []OpCode{} - for _, c := range codes { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - // End the current group and start a new one whenever - // there is a large range with no changes. - if c.Tag == 'e' && i2-i1 > nn { - group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), - j1, min(j2, j1+n)}) - groups = append(groups, group) - group = []OpCode{} - i1, j1 = max(i1, i2-n), max(j1, j2-n) - } - group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) - } - if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { - groups = append(groups, group) - } - return groups -} - -// Return a measure of the sequences' similarity (float in [0,1]). -// -// Where T is the total number of elements in both sequences, and -// M is the number of matches, this is 2.0*M / T. -// Note that this is 1 if the sequences are identical, and 0 if -// they have nothing in common. -// -// .Ratio() is expensive to compute if you haven't already computed -// .GetMatchingBlocks() or .GetOpCodes(), in which case you may -// want to try .QuickRatio() or .RealQuickRation() first to get an -// upper bound. -func (m *SequenceMatcher) Ratio() float64 { - matches := 0 - for _, m := range m.GetMatchingBlocks() { - matches += m.Size - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() relatively quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute. -func (m *SequenceMatcher) QuickRatio() float64 { - // viewing a and b as multisets, set matches to the cardinality - // of their intersection; this counts the number of matches - // without regard to order, so is clearly an upper bound - if m.fullBCount == nil { - m.fullBCount = map[string]int{} - for _, s := range m.b { - m.fullBCount[s] = m.fullBCount[s] + 1 - } - } - - // avail[x] is the number of times x appears in 'b' less the - // number of times we've seen it in 'a' so far ... kinda - avail := map[string]int{} - matches := 0 - for _, s := range m.a { - n, ok := avail[s] - if !ok { - n = m.fullBCount[s] - } - avail[s] = n - 1 - if n > 0 { - matches += 1 - } - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() very quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute than either .Ratio() or .QuickRatio(). 
-func (m *SequenceMatcher) RealQuickRatio() float64 { - la, lb := len(m.a), len(m.b) - return calculateRatio(min(la, lb), la+lb) -} - -// Convert range to the "ed" format -func formatRangeUnified(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 1 { - return fmt.Sprintf("%d", beginning) - } - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - return fmt.Sprintf("%d,%d", beginning, length) -} - -// Unified diff parameters -type UnifiedDiff struct { - A []string // First sequence lines - FromFile string // First file name - FromDate string // First file time - B []string // Second sequence lines - ToFile string // Second file name - ToDate string // Second file time - Eol string // Headers end of line, defaults to LF - Context int // Number of context lines -} - -// Compare two sequences of lines; generate the delta as a unified diff. -// -// Unified diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by 'n' which -// defaults to three. -// -// By default, the diff control lines (those with ---, +++, or @@) are -// created with a trailing newline. This is helpful so that inputs -// created from file.readlines() result in diffs that are suitable for -// file.writelines() since both the inputs and outputs have trailing -// newlines. -// -// For inputs that do not have trailing newlines, set the lineterm -// argument to "" so that the output will be uniformly newline free. -// -// The unidiff format normally has a header for filenames and modification -// times. Any or all of these may be specified using strings for -// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -// The modification times are normally expressed in the ISO 8601 format. 
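A minimal sketch of generating a unified diff with the UnifiedDiff type and WriteUnifiedDiff described above, again assuming the upstream github.com/pmezard/go-difflib import path; the file names are placeholders:

package main

import (
	"log"
	"os"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	diff := difflib.UnifiedDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\ntwo\nfour\n"),
		FromFile: "a.txt",
		ToFile:   "b.txt",
		Context:  3,
	}
	// Eol defaults to "\n", so the ---/+++/@@ control lines end with a newline,
	// matching the line-terminated inputs produced by SplitLines.
	if err := difflib.WriteUnifiedDiff(os.Stdout, diff); err != nil {
		log.Fatal(err)
	}
}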
-func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - wf := func(format string, args ...interface{}) error { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - return err - } - ws := func(s string) error { - _, err := buf.WriteString(s) - return err - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) - if err != nil { - return err - } - err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) - if err != nil { - return err - } - } - } - first, last := g[0], g[len(g)-1] - range1 := formatRangeUnified(first.I1, last.I2) - range2 := formatRangeUnified(first.J1, last.J2) - if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil { - return err - } - for _, c := range g { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - if c.Tag == 'e' { - for _, line := range diff.A[i1:i2] { - if err := ws(" " + line); err != nil { - return err - } - } - continue - } - if c.Tag == 'r' || c.Tag == 'd' { - for _, line := range diff.A[i1:i2] { - if err := ws("-" + line); err != nil { - return err - } - } - } - if c.Tag == 'r' || c.Tag == 'i' { - for _, line := range diff.B[j1:j2] { - if err := ws("+" + line); err != nil { - return err - } - } - } - } - } - return nil -} - -// Like WriteUnifiedDiff but returns the diff a string. -func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteUnifiedDiff(w, diff) - return string(w.Bytes()), err -} - -// Convert range to the "ed" format. -func formatRangeContext(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - if length <= 1 { - return fmt.Sprintf("%d", beginning) - } - return fmt.Sprintf("%d,%d", beginning, beginning+length-1) -} - -type ContextDiff UnifiedDiff - -// Compare two sequences of lines; generate the delta as a context diff. -// -// Context diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by diff.Context -// which defaults to three. -// -// By default, the diff control lines (those with *** or ---) are -// created with a trailing newline. -// -// For inputs that do not have trailing newlines, set the diff.Eol -// argument to "" so that the output will be uniformly newline free. -// -// The context diff format normally has a header for filenames and -// modification times. Any or all of these may be specified using -// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. -// The modification times are normally expressed in the ISO 8601 format. -// If not specified, the strings default to blanks. 
-func WriteContextDiff(writer io.Writer, diff ContextDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - var diffErr error - wf := func(format string, args ...interface{}) { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - if diffErr == nil && err != nil { - diffErr = err - } - } - ws := func(s string) { - _, err := buf.WriteString(s) - if diffErr == nil && err != nil { - diffErr = err - } - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - prefix := map[byte]string{ - 'i': "+ ", - 'd': "- ", - 'r': "! ", - 'e': " ", - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) - wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) - } - } - - first, last := g[0], g[len(g)-1] - ws("***************" + diff.Eol) - - range1 := formatRangeContext(first.I1, last.I2) - wf("*** %s ****%s", range1, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'd' { - for _, cc := range g { - if cc.Tag == 'i' { - continue - } - for _, line := range diff.A[cc.I1:cc.I2] { - ws(prefix[cc.Tag] + line) - } - } - break - } - } - - range2 := formatRangeContext(first.J1, last.J2) - wf("--- %s ----%s", range2, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'i' { - for _, cc := range g { - if cc.Tag == 'd' { - continue - } - for _, line := range diff.B[cc.J1:cc.J2] { - ws(prefix[cc.Tag] + line) - } - } - break - } - } - } - return diffErr -} - -// Like WriteContextDiff but returns the diff a string. -func GetContextDiffString(diff ContextDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteContextDiff(w, diff) - return string(w.Bytes()), err -} - -// Split a string on "\n" while preserving them. The output can be used -// as input for UnifiedDiff and ContextDiff structures. -func SplitLines(s string) []string { - lines := strings.SplitAfter(s, "\n") - lines[len(lines)-1] += "\n" - return lines -} diff --git a/vendor/repo-infra/vendor/golang.org/x/build/AUTHORS b/vendor/repo-infra/vendor/golang.org/x/build/AUTHORS deleted file mode 100644 index 15167cd746..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/build/AUTHORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code refers to The Go Authors for copyright purposes. -# The master list of authors is in the main Go distribution, -# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/repo-infra/vendor/golang.org/x/build/CONTRIBUTORS b/vendor/repo-infra/vendor/golang.org/x/build/CONTRIBUTORS deleted file mode 100644 index 1c4577e968..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/build/CONTRIBUTORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code was written by the Go contributors. -# The master list of contributors is in the main Go distribution, -# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/repo-infra/vendor/golang.org/x/build/LICENSE b/vendor/repo-infra/vendor/golang.org/x/build/LICENSE deleted file mode 100644 index 6a66aea5ea..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/build/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/repo-infra/vendor/golang.org/x/build/PATENTS b/vendor/repo-infra/vendor/golang.org/x/build/PATENTS deleted file mode 100644 index 733099041f..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/build/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. 
diff --git a/vendor/repo-infra/vendor/golang.org/x/build/pargzip/BUILD.bazel b/vendor/repo-infra/vendor/golang.org/x/build/pargzip/BUILD.bazel deleted file mode 100644 index bd69aba2e2..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/build/pargzip/BUILD.bazel +++ /dev/null @@ -1,9 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = ["pargzip.go"], - importmap = "k8s.io/repo-infra/vendor/golang.org/x/build/pargzip", - importpath = "golang.org/x/build/pargzip", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/golang.org/x/build/pargzip/pargzip.go b/vendor/repo-infra/vendor/golang.org/x/build/pargzip/pargzip.go deleted file mode 100644 index 33ea3146dd..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/build/pargzip/pargzip.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package pargzip contains a parallel gzip writer implementation. By -// compressing each chunk of data in parallel, all the CPUs on the -// machine can be used, at a slight loss of compression efficiency. -package pargzip - -import ( - "bufio" - "bytes" - "compress/gzip" - "io" - "runtime" - "strings" - "sync" -) - -// A Writer is an io.WriteCloser. -// Writes to a Writer are compressed and written to w. -// -// Any exported fields may only be mutated before the first call to -// Write. -type Writer struct { - // ChunkSize is the number of bytes to gzip at once. - // The default from NewWriter is 1MB. - ChunkSize int - - // Parallel is the number of chunks to compress in parallel. - // The default from NewWriter is runtime.NumCPU(). - Parallel int - - w io.Writer - bw *bufio.Writer - - allWritten chan struct{} // when writing goroutine ends - wasWriteErr chan struct{} // closed after 'err' set - - sem chan bool // semaphore bounding compressions in flight - chunkc chan *writeChunk // closed on Close - - mu sync.Mutex // guards following - closed bool - err error // sticky write error -} - -type writeChunk struct { - zw *Writer - p string // uncompressed - - donec chan struct{} // closed on completion - - // one of following is set: - z []byte // compressed - err error // exec error -} - -// compress runs the gzip child process. -// It runs in its own goroutine. -func (c *writeChunk) compress() (err error) { - defer func() { - if err != nil { - c.err = err - } - close(c.donec) - <-c.zw.sem - }() - var zbuf bytes.Buffer - zw := gzip.NewWriter(&zbuf) - if _, err := io.Copy(zw, strings.NewReader(c.p)); err != nil { - return err - } - if err := zw.Close(); err != nil { - return err - } - c.z = zbuf.Bytes() - return nil -} - -// NewWriter returns a new Writer. -// Writes to the returned writer are compressed and written to w. -// -// It is the caller's responsibility to call Close on the WriteCloser -// when done. Writes may be buffered and not flushed until Close. -// -// Any fields on Writer may only be modified before the first call to -// Write. 
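A minimal sketch of the parallel gzip writer described above, assuming the upstream golang.org/x/build/pargzip import path; the output file name is a placeholder:

package main

import (
	"log"
	"os"

	"golang.org/x/build/pargzip"
)

func main() {
	out, err := os.Create("payload.gz") // placeholder output path
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	zw := pargzip.NewWriter(out) // defaults: 1MB chunks, runtime.NumCPU() workers
	zw.ChunkSize = 4 << 20       // exported fields may only be set before the first Write
	if _, err := zw.Write([]byte("data to compress")); err != nil {
		log.Fatal(err)
	}
	if err := zw.Close(); err != nil { // Close flushes any buffered data
		log.Fatal(err)
	}
}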
-func NewWriter(w io.Writer) *Writer { - return &Writer{ - w: w, - allWritten: make(chan struct{}), - wasWriteErr: make(chan struct{}), - - ChunkSize: 1 << 20, - Parallel: runtime.NumCPU(), - } -} - -func (w *Writer) didInit() bool { return w.bw != nil } - -func (w *Writer) init() { - w.bw = bufio.NewWriterSize(newChunkWriter{w}, w.ChunkSize) - w.chunkc = make(chan *writeChunk, w.Parallel+1) - w.sem = make(chan bool, w.Parallel) - go func() { - defer close(w.allWritten) - for c := range w.chunkc { - if err := w.writeCompressedChunk(c); err != nil { - close(w.wasWriteErr) - return - } - } - }() -} - -func (w *Writer) startChunk(p []byte) { - w.sem <- true // block until we can begin - c := &writeChunk{ - zw: w, - p: string(p), // string, since the bufio.Writer owns the slice - donec: make(chan struct{}), - } - go c.compress() // receives from w.sem - select { - case w.chunkc <- c: - case <-w.wasWriteErr: - // Discard chunks that come after any chunk that failed - // to write. - } -} - -func (w *Writer) writeCompressedChunk(c *writeChunk) (err error) { - defer func() { - if err != nil { - w.mu.Lock() - defer w.mu.Unlock() - if w.err == nil { - w.err = err - } - } - }() - <-c.donec - if c.err != nil { - return c.err - } - _, err = w.w.Write(c.z) - return -} - -func (w *Writer) Write(p []byte) (n int, err error) { - if !w.didInit() { - w.init() - } - return w.bw.Write(p) -} - -func (w *Writer) Close() error { - w.mu.Lock() - err, wasClosed := w.err, w.closed - w.closed = true - w.mu.Unlock() - if wasClosed { - return nil - } - if !w.didInit() { - return nil - } - if err != nil { - return err - } - - w.bw.Flush() - close(w.chunkc) - <-w.allWritten // wait for writing goroutine to end - - w.mu.Lock() - err = w.err - w.mu.Unlock() - return err -} - -// newChunkWriter gets large chunks to compress and write to zw. -type newChunkWriter struct { - zw *Writer -} - -func (cw newChunkWriter) Write(p []byte) (n int, err error) { - n = len(p) - max := cw.zw.ChunkSize - for len(p) > 0 { - chunk := p - if len(chunk) > max { - chunk = chunk[:max] - } - p = p[len(chunk):] - cw.zw.startChunk(chunk) - } - return -} diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/AUTHORS b/vendor/repo-infra/vendor/golang.org/x/tools/AUTHORS deleted file mode 100644 index 15167cd746..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/AUTHORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code refers to The Go Authors for copyright purposes. -# The master list of authors is in the main Go distribution, -# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/repo-infra/vendor/golang.org/x/tools/CONTRIBUTORS deleted file mode 100644 index 1c4577e968..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/CONTRIBUTORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code was written by the Go contributors. -# The master list of contributors is in the main Go distribution, -# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/LICENSE b/vendor/repo-infra/vendor/golang.org/x/tools/LICENSE deleted file mode 100644 index 6a66aea5ea..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/PATENTS b/vendor/repo-infra/vendor/golang.org/x/tools/PATENTS deleted file mode 100644 index 733099041f..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/cmd/getgo/LICENSE b/vendor/repo-infra/vendor/golang.org/x/tools/cmd/getgo/LICENSE deleted file mode 100644 index 32017f8fa1..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/cmd/getgo/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2017 The Go Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/BUILD.bazel b/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/BUILD.bazel deleted file mode 100644 index 960dc32d92..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/BUILD.bazel +++ /dev/null @@ -1,14 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "discovery.go", - "env.go", - "http.go", - "vcs.go", - ], - importmap = "k8s.io/repo-infra/vendor/golang.org/x/tools/go/vcs", - importpath = "golang.org/x/tools/go/vcs", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/discovery.go b/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/discovery.go deleted file mode 100644 index f431dc1c5b..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/discovery.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package vcs - -import ( - "encoding/xml" - "fmt" - "io" - "strings" -) - -// charsetReader returns a reader for the given charset. Currently -// it only supports UTF-8 and ASCII. Otherwise, it returns a meaningful -// error which is printed by go get, so the user can find why the package -// wasn't downloaded if the encoding is not supported. Note that, in -// order to reduce potential errors, ASCII is treated as UTF-8 (i.e. characters -// greater than 0x7f are not rejected). -func charsetReader(charset string, input io.Reader) (io.Reader, error) { - switch strings.ToLower(charset) { - case "ascii": - return input, nil - default: - return nil, fmt.Errorf("can't decode XML document using charset %q", charset) - } -} - -// parseMetaGoImports returns meta imports from the HTML in r. -// Parsing ends at the end of the section or the beginning of the . 
-func parseMetaGoImports(r io.Reader) (imports []metaImport, err error) { - d := xml.NewDecoder(r) - d.CharsetReader = charsetReader - d.Strict = false - var t xml.Token - for { - t, err = d.Token() - if err != nil { - if err == io.EOF || len(imports) > 0 { - err = nil - } - return - } - if e, ok := t.(xml.StartElement); ok && strings.EqualFold(e.Name.Local, "body") { - return - } - if e, ok := t.(xml.EndElement); ok && strings.EqualFold(e.Name.Local, "head") { - return - } - e, ok := t.(xml.StartElement) - if !ok || !strings.EqualFold(e.Name.Local, "meta") { - continue - } - if attrValue(e.Attr, "name") != "go-import" { - continue - } - if f := strings.Fields(attrValue(e.Attr, "content")); len(f) == 3 { - imports = append(imports, metaImport{ - Prefix: f[0], - VCS: f[1], - RepoRoot: f[2], - }) - } - } -} - -// attrValue returns the attribute value for the case-insensitive key -// `name', or the empty string if nothing is found. -func attrValue(attrs []xml.Attr, name string) string { - for _, a := range attrs { - if strings.EqualFold(a.Name.Local, name) { - return a.Value - } - } - return "" -} diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/env.go b/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/env.go deleted file mode 100644 index e846f5b3b8..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/env.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package vcs - -import ( - "os" - "strings" -) - -// envForDir returns a copy of the environment -// suitable for running in the given directory. -// The environment is the current process's environment -// but with an updated $PWD, so that an os.Getwd in the -// child will be faster. -func envForDir(dir string) []string { - env := os.Environ() - // Internally we only use rooted paths, so dir is rooted. - // Even if dir is not rooted, no harm done. - return mergeEnvLists([]string{"PWD=" + dir}, env) -} - -// mergeEnvLists merges the two environment lists such that -// variables with the same name in "in" replace those in "out". -func mergeEnvLists(in, out []string) []string { -NextVar: - for _, inkv := range in { - k := strings.SplitAfterN(inkv, "=", 2)[0] - for i, outkv := range out { - if strings.HasPrefix(outkv, k) { - out[i] = inkv - continue NextVar - } - } - out = append(out, inkv) - } - return out -} diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/http.go b/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/http.go deleted file mode 100644 index 96188185cb..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/http.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package vcs - -import ( - "fmt" - "io" - "io/ioutil" - "log" - "net/http" - "net/url" -) - -// httpClient is the default HTTP client, but a variable so it can be -// changed by tests, without modifying http.DefaultClient. -var httpClient = http.DefaultClient - -// httpGET returns the data from an HTTP GET request for the given URL. 
-func httpGET(url string) ([]byte, error) { - resp, err := httpClient.Get(url) - if err != nil { - return nil, err - } - defer resp.Body.Close() - if resp.StatusCode != 200 { - return nil, fmt.Errorf("%s: %s", url, resp.Status) - } - b, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("%s: %v", url, err) - } - return b, nil -} - -// httpsOrHTTP returns the body of either the importPath's -// https resource or, if unavailable, the http resource. -func httpsOrHTTP(importPath string) (urlStr string, body io.ReadCloser, err error) { - fetch := func(scheme string) (urlStr string, res *http.Response, err error) { - u, err := url.Parse(scheme + "://" + importPath) - if err != nil { - return "", nil, err - } - u.RawQuery = "go-get=1" - urlStr = u.String() - if Verbose { - log.Printf("Fetching %s", urlStr) - } - res, err = httpClient.Get(urlStr) - return - } - closeBody := func(res *http.Response) { - if res != nil { - res.Body.Close() - } - } - urlStr, res, err := fetch("https") - if err != nil || res.StatusCode != 200 { - if Verbose { - if err != nil { - log.Printf("https fetch failed.") - } else { - log.Printf("ignoring https fetch with status code %d", res.StatusCode) - } - } - closeBody(res) - urlStr, res, err = fetch("http") - } - if err != nil { - closeBody(res) - return "", nil, err - } - // Note: accepting a non-200 OK here, so people can serve a - // meta import in their http 404 page. - if Verbose { - log.Printf("Parsing meta tags from %s (status code %d)", urlStr, res.StatusCode) - } - return urlStr, res.Body, nil -} diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/vcs.go b/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/vcs.go deleted file mode 100644 index 89319be04b..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/go/vcs/vcs.go +++ /dev/null @@ -1,755 +0,0 @@ -// Copyright 2012 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package vcs exposes functions for resolving import paths -// and using version control systems, which can be used to -// implement behavior similar to the standard "go get" command. -// -// This package is a copy of internal code in package cmd/go/internal/get, -// modified to make the identifiers exported. It's provided here -// for developers who want to write tools with similar semantics. -// It needs to be manually kept in sync with upstream when changes are -// made to cmd/go/internal/get; see https://golang.org/issues/11490. -// -package vcs // import "golang.org/x/tools/go/vcs" - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "log" - "net/url" - "os" - "os/exec" - "path/filepath" - "regexp" - "strconv" - "strings" -) - -// Verbose enables verbose operation logging. -var Verbose bool - -// ShowCmd controls whether VCS commands are printed. -var ShowCmd bool - -// A Cmd describes how to use a version control system -// like Mercurial, Git, or Subversion. 
-type Cmd struct { - Name string - Cmd string // name of binary to invoke command - - CreateCmd string // command to download a fresh copy of a repository - DownloadCmd string // command to download updates into an existing repository - - TagCmd []TagCmd // commands to list tags - TagLookupCmd []TagCmd // commands to lookup tags before running tagSyncCmd - TagSyncCmd string // command to sync to specific tag - TagSyncDefault string // command to sync to default tag - - LogCmd string // command to list repository changelogs in an XML format - - Scheme []string - PingCmd string -} - -// A TagCmd describes a command to list available tags -// that can be passed to Cmd.TagSyncCmd. -type TagCmd struct { - Cmd string // command to list tags - Pattern string // regexp to extract tags from list -} - -// vcsList lists the known version control systems -var vcsList = []*Cmd{ - vcsHg, - vcsGit, - vcsSvn, - vcsBzr, -} - -// ByCmd returns the version control system for the given -// command name (hg, git, svn, bzr). -func ByCmd(cmd string) *Cmd { - for _, vcs := range vcsList { - if vcs.Cmd == cmd { - return vcs - } - } - return nil -} - -// vcsHg describes how to use Mercurial. -var vcsHg = &Cmd{ - Name: "Mercurial", - Cmd: "hg", - - CreateCmd: "clone -U {repo} {dir}", - DownloadCmd: "pull", - - // We allow both tag and branch names as 'tags' - // for selecting a version. This lets people have - // a go.release.r60 branch and a go1 branch - // and make changes in both, without constantly - // editing .hgtags. - TagCmd: []TagCmd{ - {"tags", `^(\S+)`}, - {"branches", `^(\S+)`}, - }, - TagSyncCmd: "update -r {tag}", - TagSyncDefault: "update default", - - LogCmd: "log --encoding=utf-8 --limit={limit} --template={template}", - - Scheme: []string{"https", "http", "ssh"}, - PingCmd: "identify {scheme}://{repo}", -} - -// vcsGit describes how to use Git. -var vcsGit = &Cmd{ - Name: "Git", - Cmd: "git", - - CreateCmd: "clone {repo} {dir}", - DownloadCmd: "pull --ff-only", - - TagCmd: []TagCmd{ - // tags/xxx matches a git tag named xxx - // origin/xxx matches a git branch named xxx on the default remote repository - {"show-ref", `(?:tags|origin)/(\S+)$`}, - }, - TagLookupCmd: []TagCmd{ - {"show-ref tags/{tag} origin/{tag}", `((?:tags|origin)/\S+)$`}, - }, - TagSyncCmd: "checkout {tag}", - TagSyncDefault: "checkout master", - - Scheme: []string{"git", "https", "http", "git+ssh"}, - PingCmd: "ls-remote {scheme}://{repo}", -} - -// vcsBzr describes how to use Bazaar. -var vcsBzr = &Cmd{ - Name: "Bazaar", - Cmd: "bzr", - - CreateCmd: "branch {repo} {dir}", - - // Without --overwrite bzr will not pull tags that changed. - // Replace by --overwrite-tags after http://pad.lv/681792 goes in. - DownloadCmd: "pull --overwrite", - - TagCmd: []TagCmd{{"tags", `^(\S+)`}}, - TagSyncCmd: "update -r {tag}", - TagSyncDefault: "update -r revno:-1", - - Scheme: []string{"https", "http", "bzr", "bzr+ssh"}, - PingCmd: "info {scheme}://{repo}", -} - -// vcsSvn describes how to use Subversion. -var vcsSvn = &Cmd{ - Name: "Subversion", - Cmd: "svn", - - CreateCmd: "checkout {repo} {dir}", - DownloadCmd: "update", - - // There is no tag command in subversion. - // The branch information is all in the path names. - - LogCmd: "log --xml --limit={limit}", - - Scheme: []string{"https", "http", "svn", "svn+ssh"}, - PingCmd: "info {scheme}://{repo}", -} - -func (v *Cmd) String() string { - return v.Name -} - -// run runs the command line cmd in the given directory. -// keyval is a list of key, value pairs. 
run expands -// instances of {key} in cmd into value, but only after -// splitting cmd into individual arguments. -// If an error occurs, run prints the command line and the -// command's combined stdout+stderr to standard error. -// Otherwise run discards the command's output. -func (v *Cmd) run(dir string, cmd string, keyval ...string) error { - _, err := v.run1(dir, cmd, keyval, true) - return err -} - -// runVerboseOnly is like run but only generates error output to standard error in verbose mode. -func (v *Cmd) runVerboseOnly(dir string, cmd string, keyval ...string) error { - _, err := v.run1(dir, cmd, keyval, false) - return err -} - -// runOutput is like run but returns the output of the command. -func (v *Cmd) runOutput(dir string, cmd string, keyval ...string) ([]byte, error) { - return v.run1(dir, cmd, keyval, true) -} - -// run1 is the generalized implementation of run and runOutput. -func (v *Cmd) run1(dir string, cmdline string, keyval []string, verbose bool) ([]byte, error) { - m := make(map[string]string) - for i := 0; i < len(keyval); i += 2 { - m[keyval[i]] = keyval[i+1] - } - args := strings.Fields(cmdline) - for i, arg := range args { - args[i] = expand(m, arg) - } - - _, err := exec.LookPath(v.Cmd) - if err != nil { - fmt.Fprintf(os.Stderr, - "go: missing %s command. See http://golang.org/s/gogetcmd\n", - v.Name) - return nil, err - } - - cmd := exec.Command(v.Cmd, args...) - cmd.Dir = dir - cmd.Env = envForDir(cmd.Dir) - if ShowCmd { - fmt.Printf("cd %s\n", dir) - fmt.Printf("%s %s\n", v.Cmd, strings.Join(args, " ")) - } - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - err = cmd.Run() - out := buf.Bytes() - if err != nil { - if verbose || Verbose { - fmt.Fprintf(os.Stderr, "# cd %s; %s %s\n", dir, v.Cmd, strings.Join(args, " ")) - os.Stderr.Write(out) - } - return nil, err - } - return out, nil -} - -// Ping pings the repo to determine if scheme used is valid. -// This repo must be pingable with this scheme and VCS. -func (v *Cmd) Ping(scheme, repo string) error { - return v.runVerboseOnly(".", v.PingCmd, "scheme", scheme, "repo", repo) -} - -// Create creates a new copy of repo in dir. -// The parent of dir must exist; dir must not. -func (v *Cmd) Create(dir, repo string) error { - return v.run(".", v.CreateCmd, "dir", dir, "repo", repo) -} - -// CreateAtRev creates a new copy of repo in dir at revision rev. -// The parent of dir must exist; dir must not. -// rev must be a valid revision in repo. -func (v *Cmd) CreateAtRev(dir, repo, rev string) error { - if err := v.Create(dir, repo); err != nil { - return err - } - return v.run(dir, v.TagSyncCmd, "tag", rev) -} - -// Download downloads any new changes for the repo in dir. -// dir must be a valid VCS repo compatible with v. -func (v *Cmd) Download(dir string) error { - return v.run(dir, v.DownloadCmd) -} - -// Tags returns the list of available tags for the repo in dir. -// dir must be a valid VCS repo compatible with v. -func (v *Cmd) Tags(dir string) ([]string, error) { - var tags []string - for _, tc := range v.TagCmd { - out, err := v.runOutput(dir, tc.Cmd) - if err != nil { - return nil, err - } - re := regexp.MustCompile(`(?m-s)` + tc.Pattern) - for _, m := range re.FindAllStringSubmatch(string(out), -1) { - tags = append(tags, m[1]) - } - } - return tags, nil -} - -// TagSync syncs the repo in dir to the named tag, which is either a -// tag returned by Tags or the empty string (the default tag). -// dir must be a valid VCS repo compatible with v and the tag must exist. 
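A minimal sketch of driving a Cmd as documented above (clone, list tags, sync to the default tag), assuming the upstream golang.org/x/tools/go/vcs import path; the repository URL and target directory are placeholders, and the git binary must be on PATH:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/vcs"
)

func main() {
	git := vcs.ByCmd("git") // look up the Git command definition
	if git == nil {
		log.Fatal("git support not available")
	}
	dir := "/tmp/difflib-checkout" // placeholder; parent must exist, dir must not
	if err := git.Create(dir, "https://github.com/pmezard/go-difflib"); err != nil {
		log.Fatal(err)
	}
	tags, err := git.Tags(dir)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("tags:", tags)
	if err := git.TagSync(dir, ""); err != nil { // empty tag syncs to the default tag
		log.Fatal(err)
	}
}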
-func (v *Cmd) TagSync(dir, tag string) error { - if v.TagSyncCmd == "" { - return nil - } - if tag != "" { - for _, tc := range v.TagLookupCmd { - out, err := v.runOutput(dir, tc.Cmd, "tag", tag) - if err != nil { - return err - } - re := regexp.MustCompile(`(?m-s)` + tc.Pattern) - m := re.FindStringSubmatch(string(out)) - if len(m) > 1 { - tag = m[1] - break - } - } - } - if tag == "" && v.TagSyncDefault != "" { - return v.run(dir, v.TagSyncDefault) - } - return v.run(dir, v.TagSyncCmd, "tag", tag) -} - -// Log logs the changes for the repo in dir. -// dir must be a valid VCS repo compatible with v. -func (v *Cmd) Log(dir, logTemplate string) ([]byte, error) { - if err := v.Download(dir); err != nil { - return []byte{}, err - } - - const N = 50 // how many revisions to grab - return v.runOutput(dir, v.LogCmd, "limit", strconv.Itoa(N), "template", logTemplate) -} - -// LogAtRev logs the change for repo in dir at the rev revision. -// dir must be a valid VCS repo compatible with v. -// rev must be a valid revision for the repo in dir. -func (v *Cmd) LogAtRev(dir, rev, logTemplate string) ([]byte, error) { - if err := v.Download(dir); err != nil { - return []byte{}, err - } - - // Append revision flag to LogCmd. - logAtRevCmd := v.LogCmd + " --rev=" + rev - return v.runOutput(dir, logAtRevCmd, "limit", strconv.Itoa(1), "template", logTemplate) -} - -// A vcsPath describes how to convert an import path into a -// version control system and repository name. -type vcsPath struct { - prefix string // prefix this description applies to - re string // pattern for import path - repo string // repository to use (expand with match of re) - vcs string // version control system to use (expand with match of re) - check func(match map[string]string) error // additional checks - ping bool // ping for scheme to use to download repo - - regexp *regexp.Regexp // cached compiled form of re -} - -// FromDir inspects dir and its parents to determine the -// version control system and code repository to use. -// On return, root is the import path -// corresponding to the root of the repository. -func FromDir(dir, srcRoot string) (vcs *Cmd, root string, err error) { - // Clean and double-check that dir is in (a subdirectory of) srcRoot. - dir = filepath.Clean(dir) - srcRoot = filepath.Clean(srcRoot) - if len(dir) <= len(srcRoot) || dir[len(srcRoot)] != filepath.Separator { - return nil, "", fmt.Errorf("directory %q is outside source root %q", dir, srcRoot) - } - - var vcsRet *Cmd - var rootRet string - - origDir := dir - for len(dir) > len(srcRoot) { - for _, vcs := range vcsList { - if _, err := os.Stat(filepath.Join(dir, "."+vcs.Cmd)); err == nil { - root := filepath.ToSlash(dir[len(srcRoot)+1:]) - // Record first VCS we find, but keep looking, - // to detect mistakes like one kind of VCS inside another. - if vcsRet == nil { - vcsRet = vcs - rootRet = root - continue - } - // Allow .git inside .git, which can arise due to submodules. - if vcsRet == vcs && vcs.Cmd == "git" { - continue - } - // Otherwise, we have one VCS inside a different VCS. - return nil, "", fmt.Errorf("directory %q uses %s, but parent %q uses %s", - filepath.Join(srcRoot, rootRet), vcsRet.Cmd, filepath.Join(srcRoot, root), vcs.Cmd) - } - } - - // Move to parent. - ndir := filepath.Dir(dir) - if len(ndir) >= len(dir) { - // Shouldn't happen, but just in case, stop. 
- break - } - dir = ndir - } - - if vcsRet != nil { - return vcsRet, rootRet, nil - } - - return nil, "", fmt.Errorf("directory %q is not using a known version control system", origDir) -} - -// RepoRoot represents a version control system, a repo, and a root of -// where to put it on disk. -type RepoRoot struct { - VCS *Cmd - - // Repo is the repository URL, including scheme. - Repo string - - // Root is the import path corresponding to the root of the - // repository. - Root string -} - -// RepoRootForImportPath analyzes importPath to determine the -// version control system, and code repository to use. -func RepoRootForImportPath(importPath string, verbose bool) (*RepoRoot, error) { - rr, err := RepoRootForImportPathStatic(importPath, "") - if err == errUnknownSite { - rr, err = RepoRootForImportDynamic(importPath, verbose) - - // RepoRootForImportDynamic returns error detail - // that is irrelevant if the user didn't intend to use a - // dynamic import in the first place. - // Squelch it. - if err != nil { - if Verbose { - log.Printf("import %q: %v", importPath, err) - } - err = fmt.Errorf("unrecognized import path %q", importPath) - } - } - - if err == nil && strings.Contains(importPath, "...") && strings.Contains(rr.Root, "...") { - // Do not allow wildcards in the repo root. - rr = nil - err = fmt.Errorf("cannot expand ... in %q", importPath) - } - return rr, err -} - -var errUnknownSite = errors.New("dynamic lookup required to find mapping") - -// RepoRootForImportPathStatic attempts to map importPath to a -// RepoRoot using the commonly-used VCS hosting sites in vcsPaths -// (github.com/user/dir), or from a fully-qualified importPath already -// containing its VCS type (foo.com/repo.git/dir) -// -// If scheme is non-empty, that scheme is forced. -func RepoRootForImportPathStatic(importPath, scheme string) (*RepoRoot, error) { - if strings.Contains(importPath, "://") { - return nil, fmt.Errorf("invalid import path %q", importPath) - } - for _, srv := range vcsPaths { - if !strings.HasPrefix(importPath, srv.prefix) { - continue - } - m := srv.regexp.FindStringSubmatch(importPath) - if m == nil { - if srv.prefix != "" { - return nil, fmt.Errorf("invalid %s import path %q", srv.prefix, importPath) - } - continue - } - - // Build map of named subexpression matches for expand. - match := map[string]string{ - "prefix": srv.prefix, - "import": importPath, - } - for i, name := range srv.regexp.SubexpNames() { - if name != "" && match[name] == "" { - match[name] = m[i] - } - } - if srv.vcs != "" { - match["vcs"] = expand(match, srv.vcs) - } - if srv.repo != "" { - match["repo"] = expand(match, srv.repo) - } - if srv.check != nil { - if err := srv.check(match); err != nil { - return nil, err - } - } - vcs := ByCmd(match["vcs"]) - if vcs == nil { - return nil, fmt.Errorf("unknown version control system %q", match["vcs"]) - } - if srv.ping { - if scheme != "" { - match["repo"] = scheme + "://" + match["repo"] - } else { - for _, scheme := range vcs.Scheme { - if vcs.Ping(scheme, match["repo"]) == nil { - match["repo"] = scheme + "://" + match["repo"] - break - } - } - } - } - rr := &RepoRoot{ - VCS: vcs, - Repo: match["repo"], - Root: match["root"], - } - return rr, nil - } - return nil, errUnknownSite -} - -// RepoRootForImportDynamic finds a *RepoRoot for a custom domain that's not -// statically known by RepoRootForImportPathStatic. -// -// This handles custom import paths like "name.tld/pkg/foo" or just "name.tld". 
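A minimal sketch of resolving an import path to its repository root with RepoRootForImportPath as documented above, assuming the upstream golang.org/x/tools/go/vcs import path:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/vcs"
)

func main() {
	rr, err := vcs.RepoRootForImportPath("github.com/pmezard/go-difflib", false)
	if err != nil {
		log.Fatal(err)
	}
	// github.com paths are matched by the static host table, so no network
	// lookup is needed for this case.
	fmt.Println(rr.VCS.Name, rr.Repo, rr.Root)
}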
-func RepoRootForImportDynamic(importPath string, verbose bool) (*RepoRoot, error) { - slash := strings.Index(importPath, "/") - if slash < 0 { - slash = len(importPath) - } - host := importPath[:slash] - if !strings.Contains(host, ".") { - return nil, errors.New("import path doesn't contain a hostname") - } - urlStr, body, err := httpsOrHTTP(importPath) - if err != nil { - return nil, fmt.Errorf("http/https fetch: %v", err) - } - defer body.Close() - imports, err := parseMetaGoImports(body) - if err != nil { - return nil, fmt.Errorf("parsing %s: %v", importPath, err) - } - metaImport, err := matchGoImport(imports, importPath) - if err != nil { - if err != errNoMatch { - return nil, fmt.Errorf("parse %s: %v", urlStr, err) - } - return nil, fmt.Errorf("parse %s: no go-import meta tags", urlStr) - } - if verbose { - log.Printf("get %q: found meta tag %#v at %s", importPath, metaImport, urlStr) - } - // If the import was "uni.edu/bob/project", which said the - // prefix was "uni.edu" and the RepoRoot was "evilroot.com", - // make sure we don't trust Bob and check out evilroot.com to - // "uni.edu" yet (possibly overwriting/preempting another - // non-evil student). Instead, first verify the root and see - // if it matches Bob's claim. - if metaImport.Prefix != importPath { - if verbose { - log.Printf("get %q: verifying non-authoritative meta tag", importPath) - } - urlStr0 := urlStr - urlStr, body, err = httpsOrHTTP(metaImport.Prefix) - if err != nil { - return nil, fmt.Errorf("fetch %s: %v", urlStr, err) - } - imports, err := parseMetaGoImports(body) - if err != nil { - return nil, fmt.Errorf("parsing %s: %v", importPath, err) - } - if len(imports) == 0 { - return nil, fmt.Errorf("fetch %s: no go-import meta tag", urlStr) - } - metaImport2, err := matchGoImport(imports, importPath) - if err != nil || metaImport != metaImport2 { - return nil, fmt.Errorf("%s and %s disagree about go-import for %s", urlStr0, urlStr, metaImport.Prefix) - } - } - - if err := validateRepoRoot(metaImport.RepoRoot); err != nil { - return nil, fmt.Errorf("%s: invalid repo root %q: %v", urlStr, metaImport.RepoRoot, err) - } - rr := &RepoRoot{ - VCS: ByCmd(metaImport.VCS), - Repo: metaImport.RepoRoot, - Root: metaImport.Prefix, - } - if rr.VCS == nil { - return nil, fmt.Errorf("%s: unknown vcs %q", urlStr, metaImport.VCS) - } - return rr, nil -} - -// validateRepoRoot returns an error if repoRoot does not seem to be -// a valid URL with scheme. -func validateRepoRoot(repoRoot string) error { - url, err := url.Parse(repoRoot) - if err != nil { - return err - } - if url.Scheme == "" { - return errors.New("no scheme") - } - return nil -} - -// metaImport represents the parsed tags from HTML files. -type metaImport struct { - Prefix, VCS, RepoRoot string -} - -// errNoMatch is returned from matchGoImport when there's no applicable match. -var errNoMatch = errors.New("no import match") - -// matchGoImport returns the metaImport from imports matching importPath. -// An error is returned if there are multiple matches. -// errNoMatch is returned if none match. 
-func matchGoImport(imports []metaImport, importPath string) (_ metaImport, err error) {
-	match := -1
-	for i, im := range imports {
-		if !strings.HasPrefix(importPath, im.Prefix) {
-			continue
-		}
-		if match != -1 {
-			err = fmt.Errorf("multiple meta tags match import path %q", importPath)
-			return
-		}
-		match = i
-	}
-	if match == -1 {
-		err = errNoMatch
-		return
-	}
-	return imports[match], nil
-}
-
-// expand rewrites s to replace {k} with match[k] for each key k in match.
-func expand(match map[string]string, s string) string {
-	for k, v := range match {
-		s = strings.Replace(s, "{"+k+"}", v, -1)
-	}
-	return s
-}
-
-// vcsPaths lists the known vcs paths.
-var vcsPaths = []*vcsPath{
-	// go.googlesource.com
-	{
-		prefix: "go.googlesource.com",
-		re:     `^(?P<root>go\.googlesource\.com/[A-Za-z0-9_.\-]+/?)$`,
-		vcs:    "git",
-		repo:   "https://{root}",
-		check:  noVCSSuffix,
-	},
-
-	// Github
-	{
-		prefix: "github.com/",
-		re:     `^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/[\p{L}0-9_.\-]+)*$`,
-		vcs:    "git",
-		repo:   "https://{root}",
-		check:  noVCSSuffix,
-	},
-
-	// Bitbucket
-	{
-		prefix: "bitbucket.org/",
-		re:     `^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/[A-Za-z0-9_.\-]+)*$`,
-		repo:   "https://{root}",
-		check:  bitbucketVCS,
-	},
-
-	// Launchpad
-	{
-		prefix: "launchpad.net/",
-		re:     `^(?P<root>launchpad\.net/((?P<project>[A-Za-z0-9_.\-]+)(?P<series>/[A-Za-z0-9_.\-]+)?|~[A-Za-z0-9_.\-]+/(\+junk|[A-Za-z0-9_.\-]+)/[A-Za-z0-9_.\-]+))(/[A-Za-z0-9_.\-]+)*$`,
-		vcs:    "bzr",
-		repo:   "https://{root}",
-		check:  launchpadVCS,
-	},
-
-	// Git at OpenStack
-	{
-		prefix: "git.openstack.org",
-		re:     `^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/[A-Za-z0-9_.\-]+)*$`,
-		vcs:    "git",
-		repo:   "https://{root}",
-		check:  noVCSSuffix,
-	},
-
-	// General syntax for any server.
-	{
-		re:   `^(?P<root>(?P<repo>([a-z0-9.\-]+\.)+[a-z0-9.\-]+(:[0-9]+)?/[A-Za-z0-9_.\-/]*?)\.(?P<vcs>bzr|git|hg|svn))(/[A-Za-z0-9_.\-]+)*$`,
-		ping: true,
-	},
-}
-
-func init() {
-	// fill in cached regexps.
-	// Doing this eagerly discovers invalid regexp syntax
-	// without having to run a command that needs that regexp.
-	for _, srv := range vcsPaths {
-		srv.regexp = regexp.MustCompile(srv.re)
-	}
-}
-
-// noVCSSuffix checks that the repository name does not
-// end in .foo for any version control system foo.
-// The usual culprit is ".git".
-func noVCSSuffix(match map[string]string) error {
-	repo := match["repo"]
-	for _, vcs := range vcsList {
-		if strings.HasSuffix(repo, "."+vcs.Cmd) {
-			return fmt.Errorf("invalid version control suffix in %s path", match["prefix"])
-		}
-	}
-	return nil
-}
-
-// bitbucketVCS determines the version control system for a
-// Bitbucket repository, by using the Bitbucket API.
-func bitbucketVCS(match map[string]string) error {
-	if err := noVCSSuffix(match); err != nil {
-		return err
-	}
-
-	var resp struct {
-		SCM string `json:"scm"`
-	}
-	url := expand(match, "https://api.bitbucket.org/1.0/repositories/{bitname}")
-	data, err := httpGET(url)
-	if err != nil {
-		return err
-	}
-	if err := json.Unmarshal(data, &resp); err != nil {
-		return fmt.Errorf("decoding %s: %v", url, err)
-	}
-
-	if ByCmd(resp.SCM) != nil {
-		match["vcs"] = resp.SCM
-		if resp.SCM == "git" {
-			match["repo"] += ".git"
-		}
-		return nil
-	}
-
-	return fmt.Errorf("unable to detect version control system for bitbucket.org/ path")
-}
-
-// launchpadVCS solves the ambiguity for "lp.net/project/foo".
In this case, -// "foo" could be a series name registered in Launchpad with its own branch, -// and it could also be the name of a directory within the main project -// branch one level up. -func launchpadVCS(match map[string]string) error { - if match["project"] == "" || match["series"] == "" { - return nil - } - _, err := httpGET(expand(match, "https://code.launchpad.net/{project}{series}/.bzr/branch-format")) - if err != nil { - match["root"] = expand(match, "launchpad.net/{project}") - match["repo"] = expand(match, "https://{root}") - } - return nil -} diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE b/vendor/repo-infra/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE deleted file mode 100644 index 1723a2247b..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2013-2016 Guy Bedford, Luke Hoban, Addy Osmani - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/third_party/typescript/LICENSE b/vendor/repo-infra/vendor/golang.org/x/tools/third_party/typescript/LICENSE deleted file mode 100644 index e7259f8438..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/third_party/typescript/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -Apache License - -Version 2.0, January 2004 - -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
- -"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: - -You must give any other recipients of the Work or Derivative Works a copy of this License; and - -You must cause any modified files to carry prominent notices stating that You changed the files; and - -You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and - -If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/vendor/repo-infra/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE b/vendor/repo-infra/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE deleted file mode 100644 index e648283b42..0000000000 --- a/vendor/repo-infra/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2015 The Polymer Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/vendor/repo-infra/vendor/k8s.io/klog/BUILD.bazel b/vendor/repo-infra/vendor/k8s.io/klog/BUILD.bazel deleted file mode 100644 index 8defc3c812..0000000000 --- a/vendor/repo-infra/vendor/k8s.io/klog/BUILD.bazel +++ /dev/null @@ -1,12 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") - -go_library( - name = "go_default_library", - srcs = [ - "klog.go", - "klog_file.go", - ], - importmap = "k8s.io/repo-infra/vendor/k8s.io/klog", - importpath = "k8s.io/klog", - visibility = ["//visibility:public"], -) diff --git a/vendor/repo-infra/vendor/k8s.io/klog/LICENSE b/vendor/repo-infra/vendor/k8s.io/klog/LICENSE deleted file mode 100644 index 37ec93a14f..0000000000 --- a/vendor/repo-infra/vendor/k8s.io/klog/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and -distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by the copyright -owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities -that control, are controlled by, or are under common control with that entity. -For the purposes of this definition, "control" means (i) the power, direct or -indirect, to cause the direction or management of such entity, whether by -contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising -permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including -but not limited to software source code, documentation source, and configuration -files. - -"Object" form shall mean any form resulting from mechanical transformation or -translation of a Source form, including but not limited to compiled object code, -generated documentation, and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or Object form, made -available under the License, as indicated by a copyright notice that is included -in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that -is based on (or derived from) the Work and for which the editorial revisions, -annotations, elaborations, or other modifications represent, as a whole, an -original work of authorship. For the purposes of this License, Derivative Works -shall not include works that remain separable from, or merely link (or bind by -name) to the interfaces of, the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including the original version -of the Work and any modifications or additions to that Work or Derivative Works -thereof, that is intentionally submitted to Licensor for inclusion in the Work -by the copyright owner or by an individual or Legal Entity authorized to submit -on behalf of the copyright owner. 
For the purposes of this definition, -"submitted" means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, and -issue tracking systems that are managed by, or on behalf of, the Licensor for -the purpose of discussing and improving the Work, but excluding communication -that is conspicuously marked or otherwise designated in writing by the copyright -owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf -of whom a Contribution has been received by Licensor and subsequently -incorporated within the Work. - -2. Grant of Copyright License. - -Subject to the terms and conditions of this License, each Contributor hereby -grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, -irrevocable copyright license to reproduce, prepare Derivative Works of, -publicly display, publicly perform, sublicense, and distribute the Work and such -Derivative Works in Source or Object form. - -3. Grant of Patent License. - -Subject to the terms and conditions of this License, each Contributor hereby -grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, -irrevocable (except as stated in this section) patent license to make, have -made, use, offer to sell, sell, import, and otherwise transfer the Work, where -such license applies only to those patent claims licensable by such Contributor -that are necessarily infringed by their Contribution(s) alone or by combination -of their Contribution(s) with the Work to which such Contribution(s) was -submitted. If You institute patent litigation against any entity (including a -cross-claim or counterclaim in a lawsuit) alleging that the Work or a -Contribution incorporated within the Work constitutes direct or contributory -patent infringement, then any patent licenses granted to You under this License -for that Work shall terminate as of the date such litigation is filed. - -4. Redistribution. - -You may reproduce and distribute copies of the Work or Derivative Works thereof -in any medium, with or without modifications, and in Source or Object form, -provided that You meet the following conditions: - -You must give any other recipients of the Work or Derivative Works a copy of -this License; and -You must cause any modified files to carry prominent notices stating that You -changed the files; and -You must retain, in the Source form of any Derivative Works that You distribute, -all copyright, patent, trademark, and attribution notices from the Source form -of the Work, excluding those notices that do not pertain to any part of the -Derivative Works; and -If the Work includes a "NOTICE" text file as part of its distribution, then any -Derivative Works that You distribute must include a readable copy of the -attribution notices contained within such NOTICE file, excluding those notices -that do not pertain to any part of the Derivative Works, in at least one of the -following places: within a NOTICE text file distributed as part of the -Derivative Works; within the Source form or documentation, if provided along -with the Derivative Works; or, within a display generated by the Derivative -Works, if and wherever such third-party notices normally appear. The contents of -the NOTICE file are for informational purposes only and do not modify the -License. 
You may add Your own attribution notices within Derivative Works that -You distribute, alongside or as an addendum to the NOTICE text from the Work, -provided that such additional attribution notices cannot be construed as -modifying the License. -You may add Your own copyright statement to Your modifications and may provide -additional or different license terms and conditions for use, reproduction, or -distribution of Your modifications, or for any such Derivative Works as a whole, -provided Your use, reproduction, and distribution of the Work otherwise complies -with the conditions stated in this License. - -5. Submission of Contributions. - -Unless You explicitly state otherwise, any Contribution intentionally submitted -for inclusion in the Work by You to the Licensor shall be under the terms and -conditions of this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify the terms of -any separate license agreement you may have executed with Licensor regarding -such Contributions. - -6. Trademarks. - -This License does not grant permission to use the trade names, trademarks, -service marks, or product names of the Licensor, except as required for -reasonable and customary use in describing the origin of the Work and -reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. - -Unless required by applicable law or agreed to in writing, Licensor provides the -Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, -including, without limitation, any warranties or conditions of TITLE, -NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are -solely responsible for determining the appropriateness of using or -redistributing the Work and assume any risks associated with Your exercise of -permissions under this License. - -8. Limitation of Liability. - -In no event and under no legal theory, whether in tort (including negligence), -contract, or otherwise, unless required by applicable law (such as deliberate -and grossly negligent acts) or agreed to in writing, shall any Contributor be -liable to You for damages, including any direct, indirect, special, incidental, -or consequential damages of any character arising as a result of this License or -out of the use or inability to use the Work (including but not limited to -damages for loss of goodwill, work stoppage, computer failure or malfunction, or -any and all other commercial damages or losses), even if such Contributor has -been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. - -While redistributing the Work or Derivative Works thereof, You may choose to -offer, and charge a fee for, acceptance of support, warranty, indemnity, or -other liability obligations and/or rights consistent with this License. However, -in accepting such obligations, You may act only on Your own behalf and on Your -sole responsibility, not on behalf of any other Contributor, and only if You -agree to indemnify, defend, and hold each Contributor harmless for any liability -incurred by, or claims asserted against, such Contributor by reason of your -accepting any such warranty or additional liability. 
- -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work - -To apply the Apache License to your work, attach the following boilerplate -notice, with the fields enclosed by brackets "[]" replaced with your own -identifying information. (Don't include the brackets!) The text should be -enclosed in the appropriate comment syntax for the file format. We also -recommend that a file or class name and description of purpose be included on -the same "printed page" as the copyright notice for easier identification within -third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/repo-infra/vendor/k8s.io/klog/klog.go b/vendor/repo-infra/vendor/k8s.io/klog/klog.go deleted file mode 100644 index 13bcc81a75..0000000000 --- a/vendor/repo-infra/vendor/k8s.io/klog/klog.go +++ /dev/null @@ -1,1239 +0,0 @@ -// Go support for leveled logs, analogous to https://code.google.com/p/google-glog/ -// -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package klog implements logging analogous to the Google-internal C++ INFO/ERROR/V setup. -// It provides functions Info, Warning, Error, Fatal, plus formatting variants such as -// Infof. It also provides V-style logging controlled by the -v and -vmodule=file=2 flags. -// -// Basic examples: -// -// glog.Info("Prepare to repel boarders") -// -// glog.Fatalf("Initialization failed: %s", err) -// -// See the documentation for the V function for an explanation of these examples: -// -// if glog.V(2) { -// glog.Info("Starting transaction...") -// } -// -// glog.V(2).Infoln("Processed", nItems, "elements") -// -// Log output is buffered and written periodically using Flush. Programs -// should call Flush before exiting to guarantee all log output is written. -// -// By default, all log statements write to files in a temporary directory. -// This package provides several flags that modify this behavior. -// As a result, flag.Parse must be called before any logging is done. -// -// -logtostderr=false -// Logs are written to standard error instead of to files. -// -alsologtostderr=false -// Logs are written to standard error as well as to files. -// -stderrthreshold=ERROR -// Log events at or above this severity are logged to standard -// error as well as to files. -// -log_dir="" -// Log files will be written to this directory instead of the -// default temporary directory. -// -// Other flags provide aids to debugging. 
-// -// -log_backtrace_at="" -// When set to a file and line number holding a logging statement, -// such as -// -log_backtrace_at=gopherflakes.go:234 -// a stack trace will be written to the Info log whenever execution -// hits that statement. (Unlike with -vmodule, the ".go" must be -// present.) -// -v=0 -// Enable V-leveled logging at the specified level. -// -vmodule="" -// The syntax of the argument is a comma-separated list of pattern=N, -// where pattern is a literal file name (minus the ".go" suffix) or -// "glob" pattern and N is a V level. For instance, -// -vmodule=gopher*=3 -// sets the V level to 3 in all Go files whose names begin "gopher". -// -package klog - -import ( - "bufio" - "bytes" - "errors" - "flag" - "fmt" - "io" - stdLog "log" - "os" - "path/filepath" - "runtime" - "strconv" - "strings" - "sync" - "sync/atomic" - "time" -) - -// severity identifies the sort of log: info, warning etc. It also implements -// the flag.Value interface. The -stderrthreshold flag is of type severity and -// should be modified only through the flag.Value interface. The values match -// the corresponding constants in C++. -type severity int32 // sync/atomic int32 - -// These constants identify the log levels in order of increasing severity. -// A message written to a high-severity log file is also written to each -// lower-severity log file. -const ( - infoLog severity = iota - warningLog - errorLog - fatalLog - numSeverity = 4 -) - -const severityChar = "IWEF" - -var severityName = []string{ - infoLog: "INFO", - warningLog: "WARNING", - errorLog: "ERROR", - fatalLog: "FATAL", -} - -// get returns the value of the severity. -func (s *severity) get() severity { - return severity(atomic.LoadInt32((*int32)(s))) -} - -// set sets the value of the severity. -func (s *severity) set(val severity) { - atomic.StoreInt32((*int32)(s), int32(val)) -} - -// String is part of the flag.Value interface. -func (s *severity) String() string { - return strconv.FormatInt(int64(*s), 10) -} - -// Get is part of the flag.Value interface. -func (s *severity) Get() interface{} { - return *s -} - -// Set is part of the flag.Value interface. -func (s *severity) Set(value string) error { - var threshold severity - // Is it a known name? - if v, ok := severityByName(value); ok { - threshold = v - } else { - v, err := strconv.Atoi(value) - if err != nil { - return err - } - threshold = severity(v) - } - logging.stderrThreshold.set(threshold) - return nil -} - -func severityByName(s string) (severity, bool) { - s = strings.ToUpper(s) - for i, name := range severityName { - if name == s { - return severity(i), true - } - } - return 0, false -} - -// OutputStats tracks the number of output lines and bytes written. -type OutputStats struct { - lines int64 - bytes int64 -} - -// Lines returns the number of lines written. -func (s *OutputStats) Lines() int64 { - return atomic.LoadInt64(&s.lines) -} - -// Bytes returns the number of bytes written. -func (s *OutputStats) Bytes() int64 { - return atomic.LoadInt64(&s.bytes) -} - -// Stats tracks the number of lines of output and number of bytes -// per severity level. Values must be read with atomic.LoadInt64. -var Stats struct { - Info, Warning, Error OutputStats -} - -var severityStats = [numSeverity]*OutputStats{ - infoLog: &Stats.Info, - warningLog: &Stats.Warning, - errorLog: &Stats.Error, -} - -// Level is exported because it appears in the arguments to V and is -// the type of the v flag, which can be set programmatically. 
-// It's a distinct type because we want to discriminate it from logType. -// Variables of type level are only changed under logging.mu. -// The -v flag is read only with atomic ops, so the state of the logging -// module is consistent. - -// Level is treated as a sync/atomic int32. - -// Level specifies a level of verbosity for V logs. *Level implements -// flag.Value; the -v flag is of type Level and should be modified -// only through the flag.Value interface. -type Level int32 - -// get returns the value of the Level. -func (l *Level) get() Level { - return Level(atomic.LoadInt32((*int32)(l))) -} - -// set sets the value of the Level. -func (l *Level) set(val Level) { - atomic.StoreInt32((*int32)(l), int32(val)) -} - -// String is part of the flag.Value interface. -func (l *Level) String() string { - return strconv.FormatInt(int64(*l), 10) -} - -// Get is part of the flag.Value interface. -func (l *Level) Get() interface{} { - return *l -} - -// Set is part of the flag.Value interface. -func (l *Level) Set(value string) error { - v, err := strconv.Atoi(value) - if err != nil { - return err - } - logging.mu.Lock() - defer logging.mu.Unlock() - logging.setVState(Level(v), logging.vmodule.filter, false) - return nil -} - -// moduleSpec represents the setting of the -vmodule flag. -type moduleSpec struct { - filter []modulePat -} - -// modulePat contains a filter for the -vmodule flag. -// It holds a verbosity level and a file pattern to match. -type modulePat struct { - pattern string - literal bool // The pattern is a literal string - level Level -} - -// match reports whether the file matches the pattern. It uses a string -// comparison if the pattern contains no metacharacters. -func (m *modulePat) match(file string) bool { - if m.literal { - return file == m.pattern - } - match, _ := filepath.Match(m.pattern, file) - return match -} - -func (m *moduleSpec) String() string { - // Lock because the type is not atomic. TODO: clean this up. - logging.mu.Lock() - defer logging.mu.Unlock() - var b bytes.Buffer - for i, f := range m.filter { - if i > 0 { - b.WriteRune(',') - } - fmt.Fprintf(&b, "%s=%d", f.pattern, f.level) - } - return b.String() -} - -// Get is part of the (Go 1.2) flag.Getter interface. It always returns nil for this flag type since the -// struct is not exported. -func (m *moduleSpec) Get() interface{} { - return nil -} - -var errVmoduleSyntax = errors.New("syntax error: expect comma-separated list of filename=N") - -// Syntax: -vmodule=recordio=2,file=1,gfs*=3 -func (m *moduleSpec) Set(value string) error { - var filter []modulePat - for _, pat := range strings.Split(value, ",") { - if len(pat) == 0 { - // Empty strings such as from a trailing comma can be ignored. - continue - } - patLev := strings.Split(pat, "=") - if len(patLev) != 2 || len(patLev[0]) == 0 || len(patLev[1]) == 0 { - return errVmoduleSyntax - } - pattern := patLev[0] - v, err := strconv.Atoi(patLev[1]) - if err != nil { - return errors.New("syntax error: expect comma-separated list of filename=N") - } - if v < 0 { - return errors.New("negative value for vmodule level") - } - if v == 0 { - continue // Ignore. It's harmless but no point in paying the overhead. - } - // TODO: check syntax of filter? 
- filter = append(filter, modulePat{pattern, isLiteral(pattern), Level(v)}) - } - logging.mu.Lock() - defer logging.mu.Unlock() - logging.setVState(logging.verbosity, filter, true) - return nil -} - -// isLiteral reports whether the pattern is a literal string, that is, has no metacharacters -// that require filepath.Match to be called to match the pattern. -func isLiteral(pattern string) bool { - return !strings.ContainsAny(pattern, `\*?[]`) -} - -// traceLocation represents the setting of the -log_backtrace_at flag. -type traceLocation struct { - file string - line int -} - -// isSet reports whether the trace location has been specified. -// logging.mu is held. -func (t *traceLocation) isSet() bool { - return t.line > 0 -} - -// match reports whether the specified file and line matches the trace location. -// The argument file name is the full path, not the basename specified in the flag. -// logging.mu is held. -func (t *traceLocation) match(file string, line int) bool { - if t.line != line { - return false - } - if i := strings.LastIndex(file, "/"); i >= 0 { - file = file[i+1:] - } - return t.file == file -} - -func (t *traceLocation) String() string { - // Lock because the type is not atomic. TODO: clean this up. - logging.mu.Lock() - defer logging.mu.Unlock() - return fmt.Sprintf("%s:%d", t.file, t.line) -} - -// Get is part of the (Go 1.2) flag.Getter interface. It always returns nil for this flag type since the -// struct is not exported -func (t *traceLocation) Get() interface{} { - return nil -} - -var errTraceSyntax = errors.New("syntax error: expect file.go:234") - -// Syntax: -log_backtrace_at=gopherflakes.go:234 -// Note that unlike vmodule the file extension is included here. -func (t *traceLocation) Set(value string) error { - if value == "" { - // Unset. - t.line = 0 - t.file = "" - } - fields := strings.Split(value, ":") - if len(fields) != 2 { - return errTraceSyntax - } - file, line := fields[0], fields[1] - if !strings.Contains(file, ".") { - return errTraceSyntax - } - v, err := strconv.Atoi(line) - if err != nil { - return errTraceSyntax - } - if v <= 0 { - return errors.New("negative or zero value for level") - } - logging.mu.Lock() - defer logging.mu.Unlock() - t.line = v - t.file = file - return nil -} - -// flushSyncWriter is the interface satisfied by logging destinations. -type flushSyncWriter interface { - Flush() error - Sync() error - io.Writer -} - -func init() { - // Default stderrThreshold is ERROR. 
- logging.stderrThreshold = errorLog - - logging.setVState(0, nil, false) - go logging.flushDaemon() -} - -// InitFlags is for explicitly initializing the flags -func InitFlags(flagset *flag.FlagSet) { - if flagset == nil { - flagset = flag.CommandLine - } - flagset.StringVar(&logging.logDir, "log_dir", "", "If non-empty, write log files in this directory") - flagset.StringVar(&logging.logFile, "log_file", "", "If non-empty, use this log file") - flagset.BoolVar(&logging.toStderr, "logtostderr", false, "log to standard error instead of files") - flagset.BoolVar(&logging.alsoToStderr, "alsologtostderr", false, "log to standard error as well as files") - flagset.Var(&logging.verbosity, "v", "log level for V logs") - flagset.BoolVar(&logging.skipHeaders, "skip_headers", false, "If true, avoid header prefixes in the log messages") - flagset.Var(&logging.stderrThreshold, "stderrthreshold", "logs at or above this threshold go to stderr") - flagset.Var(&logging.vmodule, "vmodule", "comma-separated list of pattern=N settings for file-filtered logging") - flagset.Var(&logging.traceLocation, "log_backtrace_at", "when logging hits line file:N, emit a stack trace") -} - -// Flush flushes all pending log I/O. -func Flush() { - logging.lockAndFlushAll() -} - -// loggingT collects all the global state of the logging setup. -type loggingT struct { - // Boolean flags. Not handled atomically because the flag.Value interface - // does not let us avoid the =true, and that shorthand is necessary for - // compatibility. TODO: does this matter enough to fix? Seems unlikely. - toStderr bool // The -logtostderr flag. - alsoToStderr bool // The -alsologtostderr flag. - - // Level flag. Handled atomically. - stderrThreshold severity // The -stderrthreshold flag. - - // freeList is a list of byte buffers, maintained under freeListMu. - freeList *buffer - // freeListMu maintains the free list. It is separate from the main mutex - // so buffers can be grabbed and printed to without holding the main lock, - // for better parallelization. - freeListMu sync.Mutex - - // mu protects the remaining elements of this structure and is - // used to synchronize logging. - mu sync.Mutex - // file holds writer for each of the log types. - file [numSeverity]flushSyncWriter - // pcs is used in V to avoid an allocation when computing the caller's PC. - pcs [1]uintptr - // vmap is a cache of the V Level for each V() call site, identified by PC. - // It is wiped whenever the vmodule flag changes state. - vmap map[uintptr]Level - // filterLength stores the length of the vmodule filter chain. If greater - // than zero, it means vmodule is enabled. It may be read safely - // using sync.LoadInt32, but is only modified under mu. - filterLength int32 - // traceLocation is the state of the -log_backtrace_at flag. - traceLocation traceLocation - // These flags are modified only under lock, although verbosity may be fetched - // safely using atomic.LoadInt32. - vmodule moduleSpec // The state of the -vmodule flag. - verbosity Level // V logging level, the value of the -v flag/ - - // If non-empty, overrides the choice of directory in which to write logs. - // See createLogDirs for the full list of possible destinations. - logDir string - - // If non-empty, specifies the path of the file to write logs. mutually exclusive - // with the log-dir option. - logFile string - - // If true, do not add the prefix headers, useful when used with SetOutput - skipHeaders bool -} - -// buffer holds a byte Buffer for reuse. The zero value is ready for use. 
-type buffer struct { - bytes.Buffer - tmp [64]byte // temporary byte array for creating headers. - next *buffer -} - -var logging loggingT - -// setVState sets a consistent state for V logging. -// l.mu is held. -func (l *loggingT) setVState(verbosity Level, filter []modulePat, setFilter bool) { - // Turn verbosity off so V will not fire while we are in transition. - logging.verbosity.set(0) - // Ditto for filter length. - atomic.StoreInt32(&logging.filterLength, 0) - - // Set the new filters and wipe the pc->Level map if the filter has changed. - if setFilter { - logging.vmodule.filter = filter - logging.vmap = make(map[uintptr]Level) - } - - // Things are consistent now, so enable filtering and verbosity. - // They are enabled in order opposite to that in V. - atomic.StoreInt32(&logging.filterLength, int32(len(filter))) - logging.verbosity.set(verbosity) -} - -// getBuffer returns a new, ready-to-use buffer. -func (l *loggingT) getBuffer() *buffer { - l.freeListMu.Lock() - b := l.freeList - if b != nil { - l.freeList = b.next - } - l.freeListMu.Unlock() - if b == nil { - b = new(buffer) - } else { - b.next = nil - b.Reset() - } - return b -} - -// putBuffer returns a buffer to the free list. -func (l *loggingT) putBuffer(b *buffer) { - if b.Len() >= 256 { - // Let big buffers die a natural death. - return - } - l.freeListMu.Lock() - b.next = l.freeList - l.freeList = b - l.freeListMu.Unlock() -} - -var timeNow = time.Now // Stubbed out for testing. - -/* -header formats a log header as defined by the C++ implementation. -It returns a buffer containing the formatted header and the user's file and line number. -The depth specifies how many stack frames above lives the source line to be identified in the log message. - -Log lines have this form: - Lmmdd hh:mm:ss.uuuuuu threadid file:line] msg... -where the fields are defined as follows: - L A single character, representing the log level (eg 'I' for INFO) - mm The month (zero padded; ie May is '05') - dd The day (zero padded) - hh:mm:ss.uuuuuu Time in hours, minutes and fractional seconds - threadid The space-padded thread ID as returned by GetTID() - file The file name - line The line number - msg The user-supplied message -*/ -func (l *loggingT) header(s severity, depth int) (*buffer, string, int) { - _, file, line, ok := runtime.Caller(3 + depth) - if !ok { - file = "???" - line = 1 - } else { - slash := strings.LastIndex(file, "/") - if slash >= 0 { - file = file[slash+1:] - } - } - return l.formatHeader(s, file, line), file, line -} - -// formatHeader formats a log header using the provided file name and line number. -func (l *loggingT) formatHeader(s severity, file string, line int) *buffer { - now := timeNow() - if line < 0 { - line = 0 // not a real line number, but acceptable to someDigits - } - if s > fatalLog { - s = infoLog // for safety. - } - buf := l.getBuffer() - if l.skipHeaders { - return buf - } - - // Avoid Fprintf, for speed. The format is so simple that we can do it quickly by hand. - // It's worth about 3X. Fprintf is hard. - _, month, day := now.Date() - hour, minute, second := now.Clock() - // Lmmdd hh:mm:ss.uuuuuu threadid file:line] - buf.tmp[0] = severityChar[s] - buf.twoDigits(1, int(month)) - buf.twoDigits(3, day) - buf.tmp[5] = ' ' - buf.twoDigits(6, hour) - buf.tmp[8] = ':' - buf.twoDigits(9, minute) - buf.tmp[11] = ':' - buf.twoDigits(12, second) - buf.tmp[14] = '.' 
- buf.nDigits(6, 15, now.Nanosecond()/1000, '0') - buf.tmp[21] = ' ' - buf.nDigits(7, 22, pid, ' ') // TODO: should be TID - buf.tmp[29] = ' ' - buf.Write(buf.tmp[:30]) - buf.WriteString(file) - buf.tmp[0] = ':' - n := buf.someDigits(1, line) - buf.tmp[n+1] = ']' - buf.tmp[n+2] = ' ' - buf.Write(buf.tmp[:n+3]) - return buf -} - -// Some custom tiny helper functions to print the log header efficiently. - -const digits = "0123456789" - -// twoDigits formats a zero-prefixed two-digit integer at buf.tmp[i]. -func (buf *buffer) twoDigits(i, d int) { - buf.tmp[i+1] = digits[d%10] - d /= 10 - buf.tmp[i] = digits[d%10] -} - -// nDigits formats an n-digit integer at buf.tmp[i], -// padding with pad on the left. -// It assumes d >= 0. -func (buf *buffer) nDigits(n, i, d int, pad byte) { - j := n - 1 - for ; j >= 0 && d > 0; j-- { - buf.tmp[i+j] = digits[d%10] - d /= 10 - } - for ; j >= 0; j-- { - buf.tmp[i+j] = pad - } -} - -// someDigits formats a zero-prefixed variable-width integer at buf.tmp[i]. -func (buf *buffer) someDigits(i, d int) int { - // Print into the top, then copy down. We know there's space for at least - // a 10-digit number. - j := len(buf.tmp) - for { - j-- - buf.tmp[j] = digits[d%10] - d /= 10 - if d == 0 { - break - } - } - return copy(buf.tmp[i:], buf.tmp[j:]) -} - -func (l *loggingT) println(s severity, args ...interface{}) { - buf, file, line := l.header(s, 0) - fmt.Fprintln(buf, args...) - l.output(s, buf, file, line, false) -} - -func (l *loggingT) print(s severity, args ...interface{}) { - l.printDepth(s, 1, args...) -} - -func (l *loggingT) printDepth(s severity, depth int, args ...interface{}) { - buf, file, line := l.header(s, depth) - fmt.Fprint(buf, args...) - if buf.Bytes()[buf.Len()-1] != '\n' { - buf.WriteByte('\n') - } - l.output(s, buf, file, line, false) -} - -func (l *loggingT) printf(s severity, format string, args ...interface{}) { - buf, file, line := l.header(s, 0) - fmt.Fprintf(buf, format, args...) - if buf.Bytes()[buf.Len()-1] != '\n' { - buf.WriteByte('\n') - } - l.output(s, buf, file, line, false) -} - -// printWithFileLine behaves like print but uses the provided file and line number. If -// alsoLogToStderr is true, the log message always appears on standard error; it -// will also appear in the log file unless --logtostderr is set. -func (l *loggingT) printWithFileLine(s severity, file string, line int, alsoToStderr bool, args ...interface{}) { - buf := l.formatHeader(s, file, line) - fmt.Fprint(buf, args...) - if buf.Bytes()[buf.Len()-1] != '\n' { - buf.WriteByte('\n') - } - l.output(s, buf, file, line, alsoToStderr) -} - -// redirectBuffer is used to set an alternate destination for the logs -type redirectBuffer struct { - w io.Writer -} - -func (rb *redirectBuffer) Sync() error { - return nil -} - -func (rb *redirectBuffer) Flush() error { - return nil -} - -func (rb *redirectBuffer) Write(bytes []byte) (n int, err error) { - return rb.w.Write(bytes) -} - -// SetOutput sets the output destination for all severities -func SetOutput(w io.Writer) { - for s := fatalLog; s >= infoLog; s-- { - rb := &redirectBuffer{ - w: w, - } - logging.file[s] = rb - } -} - -// SetOutputBySeverity sets the output destination for specific severity -func SetOutputBySeverity(name string, w io.Writer) { - sev, ok := severityByName(name) - if !ok { - panic(fmt.Sprintf("SetOutputBySeverity(%q): unrecognized severity name", name)) - } - rb := &redirectBuffer{ - w: w, - } - logging.file[sev] = rb -} - -// output writes the data to the log files and releases the buffer. 
-func (l *loggingT) output(s severity, buf *buffer, file string, line int, alsoToStderr bool) { - l.mu.Lock() - if l.traceLocation.isSet() { - if l.traceLocation.match(file, line) { - buf.Write(stacks(false)) - } - } - data := buf.Bytes() - if l.toStderr { - os.Stderr.Write(data) - } else { - if alsoToStderr || l.alsoToStderr || s >= l.stderrThreshold.get() { - os.Stderr.Write(data) - } - if l.file[s] == nil { - if err := l.createFiles(s); err != nil { - os.Stderr.Write(data) // Make sure the message appears somewhere. - l.exit(err) - } - } - switch s { - case fatalLog: - l.file[fatalLog].Write(data) - fallthrough - case errorLog: - l.file[errorLog].Write(data) - fallthrough - case warningLog: - l.file[warningLog].Write(data) - fallthrough - case infoLog: - l.file[infoLog].Write(data) - } - } - if s == fatalLog { - // If we got here via Exit rather than Fatal, print no stacks. - if atomic.LoadUint32(&fatalNoStacks) > 0 { - l.mu.Unlock() - timeoutFlush(10 * time.Second) - os.Exit(1) - } - // Dump all goroutine stacks before exiting. - // First, make sure we see the trace for the current goroutine on standard error. - // If -logtostderr has been specified, the loop below will do that anyway - // as the first stack in the full dump. - if !l.toStderr { - os.Stderr.Write(stacks(false)) - } - // Write the stack trace for all goroutines to the files. - trace := stacks(true) - logExitFunc = func(error) {} // If we get a write error, we'll still exit below. - for log := fatalLog; log >= infoLog; log-- { - if f := l.file[log]; f != nil { // Can be nil if -logtostderr is set. - f.Write(trace) - } - } - l.mu.Unlock() - timeoutFlush(10 * time.Second) - os.Exit(255) // C++ uses -1, which is silly because it's anded with 255 anyway. - } - l.putBuffer(buf) - l.mu.Unlock() - if stats := severityStats[s]; stats != nil { - atomic.AddInt64(&stats.lines, 1) - atomic.AddInt64(&stats.bytes, int64(len(data))) - } -} - -// timeoutFlush calls Flush and returns when it completes or after timeout -// elapses, whichever happens first. This is needed because the hooks invoked -// by Flush may deadlock when glog.Fatal is called from a hook that holds -// a lock. -func timeoutFlush(timeout time.Duration) { - done := make(chan bool, 1) - go func() { - Flush() // calls logging.lockAndFlushAll() - done <- true - }() - select { - case <-done: - case <-time.After(timeout): - fmt.Fprintln(os.Stderr, "glog: Flush took longer than", timeout) - } -} - -// stacks is a wrapper for runtime.Stack that attempts to recover the data for all goroutines. -func stacks(all bool) []byte { - // We don't know how big the traces are, so grow a few times if they don't fit. Start large, though. - n := 10000 - if all { - n = 100000 - } - var trace []byte - for i := 0; i < 5; i++ { - trace = make([]byte, n) - nbytes := runtime.Stack(trace, all) - if nbytes < len(trace) { - return trace[:nbytes] - } - n *= 2 - } - return trace -} - -// logExitFunc provides a simple mechanism to override the default behavior -// of exiting on error. Used in testing and to guarantee we reach a required exit -// for fatal logs. Instead, exit could be a function rather than a method but that -// would make its use clumsier. -var logExitFunc func(error) - -// exit is called if there is trouble creating or writing log files. -// It flushes the logs and exits the program; there's no point in hanging around. -// l.mu is held. 
-func (l *loggingT) exit(err error) { - fmt.Fprintf(os.Stderr, "log: exiting because of error: %s\n", err) - // If logExitFunc is set, we do that instead of exiting. - if logExitFunc != nil { - logExitFunc(err) - return - } - l.flushAll() - os.Exit(2) -} - -// syncBuffer joins a bufio.Writer to its underlying file, providing access to the -// file's Sync method and providing a wrapper for the Write method that provides log -// file rotation. There are conflicting methods, so the file cannot be embedded. -// l.mu is held for all its methods. -type syncBuffer struct { - logger *loggingT - *bufio.Writer - file *os.File - sev severity - nbytes uint64 // The number of bytes written to this file -} - -func (sb *syncBuffer) Sync() error { - return sb.file.Sync() -} - -func (sb *syncBuffer) Write(p []byte) (n int, err error) { - if sb.nbytes+uint64(len(p)) >= MaxSize { - if err := sb.rotateFile(time.Now()); err != nil { - sb.logger.exit(err) - } - } - n, err = sb.Writer.Write(p) - sb.nbytes += uint64(n) - if err != nil { - sb.logger.exit(err) - } - return -} - -// rotateFile closes the syncBuffer's file and starts a new one. -func (sb *syncBuffer) rotateFile(now time.Time) error { - if sb.file != nil { - sb.Flush() - sb.file.Close() - } - var err error - sb.file, _, err = create(severityName[sb.sev], now) - sb.nbytes = 0 - if err != nil { - return err - } - - sb.Writer = bufio.NewWriterSize(sb.file, bufferSize) - - // Write header. - var buf bytes.Buffer - fmt.Fprintf(&buf, "Log file created at: %s\n", now.Format("2006/01/02 15:04:05")) - fmt.Fprintf(&buf, "Running on machine: %s\n", host) - fmt.Fprintf(&buf, "Binary: Built with %s %s for %s/%s\n", runtime.Compiler, runtime.Version(), runtime.GOOS, runtime.GOARCH) - fmt.Fprintf(&buf, "Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg\n") - n, err := sb.file.Write(buf.Bytes()) - sb.nbytes += uint64(n) - return err -} - -// bufferSize sizes the buffer associated with each log file. It's large -// so that log records can accumulate without the logging thread blocking -// on disk I/O. The flushDaemon will block instead. -const bufferSize = 256 * 1024 - -// createFiles creates all the log files for severity from sev down to infoLog. -// l.mu is held. -func (l *loggingT) createFiles(sev severity) error { - now := time.Now() - // Files are created in decreasing severity order, so as soon as we find one - // has already been created, we can stop. - for s := sev; s >= infoLog && l.file[s] == nil; s-- { - sb := &syncBuffer{ - logger: l, - sev: s, - } - if err := sb.rotateFile(now); err != nil { - return err - } - l.file[s] = sb - } - return nil -} - -const flushInterval = 30 * time.Second - -// flushDaemon periodically flushes the log file buffers. -func (l *loggingT) flushDaemon() { - for range time.NewTicker(flushInterval).C { - l.lockAndFlushAll() - } -} - -// lockAndFlushAll is like flushAll but locks l.mu first. -func (l *loggingT) lockAndFlushAll() { - l.mu.Lock() - l.flushAll() - l.mu.Unlock() -} - -// flushAll flushes all the logs and attempts to "sync" their data to disk. -// l.mu is held. -func (l *loggingT) flushAll() { - // Flush from fatal down, in case there's trouble flushing. - for s := fatalLog; s >= infoLog; s-- { - file := l.file[s] - if file != nil { - file.Flush() // ignore error - file.Sync() // ignore error - } - } -} - -// CopyStandardLogTo arranges for messages written to the Go "log" package's -// default logs to also appear in the Google logs for the named and lower -// severities. 
Subsequent changes to the standard log's default output location -// or format may break this behavior. -// -// Valid names are "INFO", "WARNING", "ERROR", and "FATAL". If the name is not -// recognized, CopyStandardLogTo panics. -func CopyStandardLogTo(name string) { - sev, ok := severityByName(name) - if !ok { - panic(fmt.Sprintf("log.CopyStandardLogTo(%q): unrecognized severity name", name)) - } - // Set a log format that captures the user's file and line: - // d.go:23: message - stdLog.SetFlags(stdLog.Lshortfile) - stdLog.SetOutput(logBridge(sev)) -} - -// logBridge provides the Write method that enables CopyStandardLogTo to connect -// Go's standard logs to the logs provided by this package. -type logBridge severity - -// Write parses the standard logging line and passes its components to the -// logger for severity(lb). -func (lb logBridge) Write(b []byte) (n int, err error) { - var ( - file = "???" - line = 1 - text string - ) - // Split "d.go:23: message" into "d.go", "23", and "message". - if parts := bytes.SplitN(b, []byte{':'}, 3); len(parts) != 3 || len(parts[0]) < 1 || len(parts[2]) < 1 { - text = fmt.Sprintf("bad log format: %s", b) - } else { - file = string(parts[0]) - text = string(parts[2][1:]) // skip leading space - line, err = strconv.Atoi(string(parts[1])) - if err != nil { - text = fmt.Sprintf("bad line number: %s", b) - line = 1 - } - } - // printWithFileLine with alsoToStderr=true, so standard log messages - // always appear on standard error. - logging.printWithFileLine(severity(lb), file, line, true, text) - return len(b), nil -} - -// setV computes and remembers the V level for a given PC -// when vmodule is enabled. -// File pattern matching takes the basename of the file, stripped -// of its .go suffix, and uses filepath.Match, which is a little more -// general than the *? matching used in C++. -// l.mu is held. -func (l *loggingT) setV(pc uintptr) Level { - fn := runtime.FuncForPC(pc) - file, _ := fn.FileLine(pc) - // The file is something like /a/b/c/d.go. We want just the d. - if strings.HasSuffix(file, ".go") { - file = file[:len(file)-3] - } - if slash := strings.LastIndex(file, "/"); slash >= 0 { - file = file[slash+1:] - } - for _, filter := range l.vmodule.filter { - if filter.match(file) { - l.vmap[pc] = filter.level - return filter.level - } - } - l.vmap[pc] = 0 - return 0 -} - -// Verbose is a boolean type that implements Infof (like Printf) etc. -// See the documentation of V for more information. -type Verbose bool - -// V reports whether verbosity at the call site is at least the requested level. -// The returned value is a boolean of type Verbose, which implements Info, Infoln -// and Infof. These methods will write to the Info log if called. -// Thus, one may write either -// if glog.V(2) { glog.Info("log this") } -// or -// glog.V(2).Info("log this") -// The second form is shorter but the first is cheaper if logging is off because it does -// not evaluate its arguments. -// -// Whether an individual call to V generates a log record depends on the setting of -// the -v and --vmodule flags; both are off by default. If the level in the call to -// V is at least the value of -v, or of -vmodule for the source file containing the -// call, the V call will log. -func V(level Level) Verbose { - // This function tries hard to be cheap unless there's work to do. - // The fast path is two atomic loads and compares. - - // Here is a cheap but safe test to see if V logging is enabled globally. 
- if logging.verbosity.get() >= level { - return Verbose(true) - } - - // It's off globally but it vmodule may still be set. - // Here is another cheap but safe test to see if vmodule is enabled. - if atomic.LoadInt32(&logging.filterLength) > 0 { - // Now we need a proper lock to use the logging structure. The pcs field - // is shared so we must lock before accessing it. This is fairly expensive, - // but if V logging is enabled we're slow anyway. - logging.mu.Lock() - defer logging.mu.Unlock() - if runtime.Callers(2, logging.pcs[:]) == 0 { - return Verbose(false) - } - v, ok := logging.vmap[logging.pcs[0]] - if !ok { - v = logging.setV(logging.pcs[0]) - } - return Verbose(v >= level) - } - return Verbose(false) -} - -// Info is equivalent to the global Info function, guarded by the value of v. -// See the documentation of V for usage. -func (v Verbose) Info(args ...interface{}) { - if v { - logging.print(infoLog, args...) - } -} - -// Infoln is equivalent to the global Infoln function, guarded by the value of v. -// See the documentation of V for usage. -func (v Verbose) Infoln(args ...interface{}) { - if v { - logging.println(infoLog, args...) - } -} - -// Infof is equivalent to the global Infof function, guarded by the value of v. -// See the documentation of V for usage. -func (v Verbose) Infof(format string, args ...interface{}) { - if v { - logging.printf(infoLog, format, args...) - } -} - -// Info logs to the INFO log. -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Info(args ...interface{}) { - logging.print(infoLog, args...) -} - -// InfoDepth acts as Info but uses depth to determine which call frame to log. -// InfoDepth(0, "msg") is the same as Info("msg"). -func InfoDepth(depth int, args ...interface{}) { - logging.printDepth(infoLog, depth, args...) -} - -// Infoln logs to the INFO log. -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Infoln(args ...interface{}) { - logging.println(infoLog, args...) -} - -// Infof logs to the INFO log. -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Infof(format string, args ...interface{}) { - logging.printf(infoLog, format, args...) -} - -// Warning logs to the WARNING and INFO logs. -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Warning(args ...interface{}) { - logging.print(warningLog, args...) -} - -// WarningDepth acts as Warning but uses depth to determine which call frame to log. -// WarningDepth(0, "msg") is the same as Warning("msg"). -func WarningDepth(depth int, args ...interface{}) { - logging.printDepth(warningLog, depth, args...) -} - -// Warningln logs to the WARNING and INFO logs. -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Warningln(args ...interface{}) { - logging.println(warningLog, args...) -} - -// Warningf logs to the WARNING and INFO logs. -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Warningf(format string, args ...interface{}) { - logging.printf(warningLog, format, args...) -} - -// Error logs to the ERROR, WARNING, and INFO logs. -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Error(args ...interface{}) { - logging.print(errorLog, args...) -} - -// ErrorDepth acts as Error but uses depth to determine which call frame to log. -// ErrorDepth(0, "msg") is the same as Error("msg"). 
-func ErrorDepth(depth int, args ...interface{}) { - logging.printDepth(errorLog, depth, args...) -} - -// Errorln logs to the ERROR, WARNING, and INFO logs. -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Errorln(args ...interface{}) { - logging.println(errorLog, args...) -} - -// Errorf logs to the ERROR, WARNING, and INFO logs. -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Errorf(format string, args ...interface{}) { - logging.printf(errorLog, format, args...) -} - -// Fatal logs to the FATAL, ERROR, WARNING, and INFO logs, -// including a stack trace of all running goroutines, then calls os.Exit(255). -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Fatal(args ...interface{}) { - logging.print(fatalLog, args...) -} - -// FatalDepth acts as Fatal but uses depth to determine which call frame to log. -// FatalDepth(0, "msg") is the same as Fatal("msg"). -func FatalDepth(depth int, args ...interface{}) { - logging.printDepth(fatalLog, depth, args...) -} - -// Fatalln logs to the FATAL, ERROR, WARNING, and INFO logs, -// including a stack trace of all running goroutines, then calls os.Exit(255). -// Arguments are handled in the manner of fmt.Println; a newline is appended if missing. -func Fatalln(args ...interface{}) { - logging.println(fatalLog, args...) -} - -// Fatalf logs to the FATAL, ERROR, WARNING, and INFO logs, -// including a stack trace of all running goroutines, then calls os.Exit(255). -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Fatalf(format string, args ...interface{}) { - logging.printf(fatalLog, format, args...) -} - -// fatalNoStacks is non-zero if we are to exit without dumping goroutine stacks. -// It allows Exit and relatives to use the Fatal logs. -var fatalNoStacks uint32 - -// Exit logs to the FATAL, ERROR, WARNING, and INFO logs, then calls os.Exit(1). -// Arguments are handled in the manner of fmt.Print; a newline is appended if missing. -func Exit(args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.print(fatalLog, args...) -} - -// ExitDepth acts as Exit but uses depth to determine which call frame to log. -// ExitDepth(0, "msg") is the same as Exit("msg"). -func ExitDepth(depth int, args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.printDepth(fatalLog, depth, args...) -} - -// Exitln logs to the FATAL, ERROR, WARNING, and INFO logs, then calls os.Exit(1). -func Exitln(args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.println(fatalLog, args...) -} - -// Exitf logs to the FATAL, ERROR, WARNING, and INFO logs, then calls os.Exit(1). -// Arguments are handled in the manner of fmt.Printf; a newline is appended if missing. -func Exitf(format string, args ...interface{}) { - atomic.StoreUint32(&fatalNoStacks, 1) - logging.printf(fatalLog, format, args...) -} diff --git a/vendor/repo-infra/vendor/k8s.io/klog/klog_file.go b/vendor/repo-infra/vendor/k8s.io/klog/klog_file.go deleted file mode 100644 index b76a4e10be..0000000000 --- a/vendor/repo-infra/vendor/k8s.io/klog/klog_file.go +++ /dev/null @@ -1,126 +0,0 @@ -// Go support for leveled logs, analogous to https://code.google.com/p/google-glog/ -// -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// File I/O for logs. - -package klog - -import ( - "errors" - "fmt" - "os" - "os/user" - "path/filepath" - "strings" - "sync" - "time" -) - -// MaxSize is the maximum size of a log file in bytes. -var MaxSize uint64 = 1024 * 1024 * 1800 - -// logDirs lists the candidate directories for new log files. -var logDirs []string - -func createLogDirs() { - if logging.logDir != "" { - logDirs = append(logDirs, logging.logDir) - } - logDirs = append(logDirs, os.TempDir()) -} - -var ( - pid = os.Getpid() - program = filepath.Base(os.Args[0]) - host = "unknownhost" - userName = "unknownuser" -) - -func init() { - h, err := os.Hostname() - if err == nil { - host = shortHostname(h) - } - - current, err := user.Current() - if err == nil { - userName = current.Username - } - - // Sanitize userName since it may contain filepath separators on Windows. - userName = strings.Replace(userName, `\`, "_", -1) -} - -// shortHostname returns its argument, truncating at the first period. -// For instance, given "www.google.com" it returns "www". -func shortHostname(hostname string) string { - if i := strings.Index(hostname, "."); i >= 0 { - return hostname[:i] - } - return hostname -} - -// logName returns a new log file name containing tag, with start time t, and -// the name for the symlink for tag. -func logName(tag string, t time.Time) (name, link string) { - name = fmt.Sprintf("%s.%s.%s.log.%s.%04d%02d%02d-%02d%02d%02d.%d", - program, - host, - userName, - tag, - t.Year(), - t.Month(), - t.Day(), - t.Hour(), - t.Minute(), - t.Second(), - pid) - return name, program + "." + tag -} - -var onceLogDirs sync.Once - -// create creates a new log file and returns the file and its filename, which -// contains tag ("INFO", "FATAL", etc.) and t. If the file is created -// successfully, create also attempts to update the symlink for that tag, ignoring -// errors. 
-func create(tag string, t time.Time) (f *os.File, filename string, err error) { - if logging.logFile != "" { - f, err := os.Create(logging.logFile) - if err == nil { - return f, logging.logFile, nil - } - return nil, "", fmt.Errorf("log: unable to create log: %v", err) - } - onceLogDirs.Do(createLogDirs) - if len(logDirs) == 0 { - return nil, "", errors.New("log: no log dirs") - } - name, link := logName(tag, t) - var lastErr error - for _, dir := range logDirs { - fname := filepath.Join(dir, name) - f, err := os.Create(fname) - if err == nil { - symlink := filepath.Join(dir, link) - os.Remove(symlink) // ignore err - os.Symlink(name, symlink) // ignore err - return f, fname, nil - } - lastErr = err - } - return nil, "", fmt.Errorf("log: cannot create log: %v", lastErr) -} diff --git a/vendor/repo-infra/verify/BUILD.bazel b/vendor/repo-infra/verify/BUILD.bazel deleted file mode 100644 index 10b3f5e31a..0000000000 --- a/vendor/repo-infra/verify/BUILD.bazel +++ /dev/null @@ -1,20 +0,0 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_path") - -sh_test( - name = "verify-crosstool", - srcs = ["verify-crosstool.sh"], - args = [ - "$(location //tools:CROSSTOOL)", - "$(location //tools:generated_CROSSTOOL.textpb)", - ], - data = [ - "//tools:CROSSTOOL", - "//tools:generated_CROSSTOOL.textpb", - ], -) - -go_path( - name = "verify-go-src-go_path", - mode = "link", - deps = ["//tools/generate_crosstool:crosstool_config_go_proto"], -) diff --git a/vendor/repo-infra/verify/README.md b/vendor/repo-infra/verify/README.md deleted file mode 100644 index 6eca4ece1b..0000000000 --- a/vendor/repo-infra/verify/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# Verification scripts - -Collection of scripts that verifies that a project meets requirements set for kubernetes related projects. The scripts are to be invoked depending on the needs via CI tooling, such as Travis CI. See main Readme file on how to integrate the repo-infra in your project. - -The scripts are currently being migrated from the main kubernetes repository. If your project requires additional set of verifications, consider creating an issue/PR on repo-infra to avoid code duplication across multiple projects. - -If repo-infra is integrated at the root of your project as git submodule at path: `/repo-infra`, -then scripts can be invoked as `repo-infra/verify/verify-*.sh` - -travis.yaml example: - -``` -dist: trusty - -os: -- linux - -language: go - -go: -- 1.8 - -before_install: -- go get -u github.com/alecthomas/gometalinter - -install: -- gometalinter --install - -script: -- repo-infra/verify/verify-go-src.sh -v -- repo-infra/verify/verify-boilerplate.sh -# OR with vendoring -# - vendor/github.com/kubernetes/repo-infra/verify-go-src.sh --rootdir=$(pwd) -v -``` - -## Verify boilerplate - -Verifies that the boilerplate for various formats (go files, Makefile, etc.) is included in each file: `verify-boilerplate.sh`. - -## Verify go source code - -Runs a set of scripts on the go source code excluding vendored files: `verify-go-src.sh`. Expects `gometalinter` tooling installed (see travis file above) - -With git submodule from your repo root: `repo-infra/verify/verify-go-src.sh -v` - -With vendoring: `vendor/repo-infra/verify/verify-go-src.sh -v --rootdir $(pwd)` - -Checks include: - -1. gofmt -2. gometalinter -3. 
govet diff --git a/vendor/repo-infra/verify/boilerplate/BUILD.bazel b/vendor/repo-infra/verify/boilerplate/BUILD.bazel deleted file mode 100644 index c405bde045..0000000000 --- a/vendor/repo-infra/verify/boilerplate/BUILD.bazel +++ /dev/null @@ -1,3 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -exports_files(glob(["*.txt"])) diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.Dockerfile.txt b/vendor/repo-infra/verify/boilerplate/boilerplate.Dockerfile.txt deleted file mode 100644 index 384f325abf..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.Dockerfile.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright YEAR The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.Makefile.txt b/vendor/repo-infra/verify/boilerplate/boilerplate.Makefile.txt deleted file mode 100644 index 384f325abf..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.Makefile.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright YEAR The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.bzl.txt b/vendor/repo-infra/verify/boilerplate/boilerplate.bzl.txt deleted file mode 100644 index 384f325abf..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.bzl.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright YEAR The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.go.txt b/vendor/repo-infra/verify/boilerplate/boilerplate.go.txt deleted file mode 100644 index 59e740c1ee..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.go.txt +++ /dev/null @@ -1,16 +0,0 @@ -/* -Copyright YEAR The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.py b/vendor/repo-infra/verify/boilerplate/boilerplate.py deleted file mode 100755 index 3507c214cf..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import argparse -import difflib -import glob -import json -import mmap -import os -import re -import sys -from datetime import date - -parser = argparse.ArgumentParser() -parser.add_argument( - "filenames", - help="list of files to check, all files if unspecified", - nargs='*') - -# Rootdir defaults to the directory **above** the repo-infra dir. -rootdir = os.path.dirname(__file__) + "/../../../" -rootdir = os.path.abspath(rootdir) -parser.add_argument( - "--rootdir", default=rootdir, help="root directory to examine") - -default_boilerplate_dir = os.path.join(rootdir, "repo-infra/verify/boilerplate") -parser.add_argument( - "--boilerplate-dir", default=default_boilerplate_dir) - -parser.add_argument( - "-v", "--verbose", - help="give verbose output regarding why a file does not pass", - action="store_true") - -args = parser.parse_args() - -verbose_out = sys.stderr if args.verbose else open("/dev/null", "w") - -def get_refs(): - refs = {} - - for path in glob.glob(os.path.join(args.boilerplate_dir, "boilerplate.*.txt")): - extension = os.path.basename(path).split(".")[1] - - ref_file = open(path, 'r') - ref = ref_file.read().splitlines() - ref_file.close() - refs[extension] = ref - - return refs - -def file_passes(filename, refs, regexs): - try: - f = open(filename, 'r') - except Exception as exc: - print("Unable to open %s: %s" % (filename, exc), file=verbose_out) - return False - - data = f.read() - f.close() - - basename = os.path.basename(filename) - extension = file_extension(filename) - if extension != "": - ref = refs[extension] - else: - ref = refs[basename] - - # remove build tags from the top of Go files - if extension == "go": - p = regexs["go_build_constraints"] - (data, found) = p.subn("", data, 1) - - # remove shebang from the top of shell files - if extension == "sh" or extension == "py": - p = regexs["shebang"] - (data, found) = p.subn("", data, 1) - - data = data.splitlines() - - # if our test file is smaller than the reference it surely fails! 
- if len(ref) > len(data): - print('File %s smaller than reference (%d < %d)' % - (filename, len(data), len(ref)), - file=verbose_out) - return False - - # trim our file to the same number of lines as the reference file - data = data[:len(ref)] - - p = regexs["year"] - for d in data: - if p.search(d): - print('File %s is missing the year' % filename, file=verbose_out) - return False - - # Replace all occurrences of the regex "CURRENT_YEAR|...|2016|2015|2014" with "YEAR" - p = regexs["date"] - for i, d in enumerate(data): - (data[i], found) = p.subn('YEAR', d) - if found != 0: - break - - # if we don't match the reference at this point, fail - if ref != data: - print("Header in %s does not match reference, diff:" % filename, file=verbose_out) - if args.verbose: - print(file=verbose_out) - for line in difflib.unified_diff(ref, data, 'reference', filename, lineterm=''): - print(line, file=verbose_out) - print(file=verbose_out) - return False - - return True - -def file_extension(filename): - return os.path.splitext(filename)[1].split(".")[-1].lower() - -skipped_dirs = ['Godeps', 'third_party', '_gopath', '_output', '.git', - 'cluster/env.sh', 'vendor', 'test/e2e/generated/bindata.go', - 'repo-infra/verify/boilerplate/test', '.glide'] - -def normalize_files(files): - newfiles = [] - for pathname in files: - if any(x in pathname for x in skipped_dirs): - continue - newfiles.append(pathname) - for i, pathname in enumerate(newfiles): - if not os.path.isabs(pathname): - newfiles[i] = os.path.join(args.rootdir, pathname) - return newfiles - -def get_files(extensions): - files = [] - if len(args.filenames) > 0: - files = args.filenames - else: - for root, dirs, walkfiles in os.walk(args.rootdir): - # don't visit certain dirs. This is just a performance improvement - # as we would prune these later in normalize_files(). 
But doing it - # cuts down the amount of filesystem walking we do and cuts down - # the size of the file list - for d in skipped_dirs: - if d in dirs: - dirs.remove(d) - - for name in walkfiles: - pathname = os.path.join(root, name) - files.append(pathname) - - files = normalize_files(files) - - outfiles = [] - for pathname in files: - basename = os.path.basename(pathname) - extension = file_extension(pathname) - if extension in extensions or basename in extensions: - outfiles.append(pathname) - return outfiles - -def get_regexs(): - regexs = {} - # Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing - regexs["year"] = re.compile( 'YEAR' ) - # dates can be 2014, 2015, 2016, ..., CURRENT_YEAR, company holder names can be anything - years = range(2014, date.today().year + 1) - regexs["date"] = re.compile( '(%s)' % "|".join(map(lambda l: str(l), years)) ) - # strip // +build \n\n build constraints - regexs["go_build_constraints"] = re.compile(r"^(// \+build.*\n)+\n", re.MULTILINE) - # strip #!.* from shell scripts - regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE) - return regexs - -def main(): - regexs = get_regexs() - refs = get_refs() - filenames = get_files(refs.keys()) - - for filename in filenames: - if not file_passes(filename, refs, regexs): - print(filename, file=sys.stdout) - - return 0 - -if __name__ == "__main__": - sys.exit(main()) diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.py.txt b/vendor/repo-infra/verify/boilerplate/boilerplate.py.txt deleted file mode 100644 index 384f325abf..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.py.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright YEAR The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate.sh.txt b/vendor/repo-infra/verify/boilerplate/boilerplate.sh.txt deleted file mode 100644 index 384f325abf..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate.sh.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright YEAR The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/vendor/repo-infra/verify/boilerplate/boilerplate_test.py b/vendor/repo-infra/verify/boilerplate/boilerplate_test.py deleted file mode 100644 index b8d5b8e9e0..0000000000 --- a/vendor/repo-infra/verify/boilerplate/boilerplate_test.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 The Kubernetes Authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import boilerplate -import unittest -import StringIO -import os -import sys - -class TestBoilerplate(unittest.TestCase): - """ - Note: run this test from the hack/boilerplate directory. - - $ python -m unittest boilerplate_test - """ - - def test_boilerplate(self): - os.chdir("test/") - - class Args(object): - def __init__(self): - self.filenames = [] - self.rootdir = "." - self.boilerplate_dir = "../" - self.verbose = True - - # capture stdout - old_stdout = sys.stdout - sys.stdout = StringIO.StringIO() - - boilerplate.args = Args() - ret = boilerplate.main() - - output = sorted(sys.stdout.getvalue().split()) - - sys.stdout = old_stdout - - self.assertEquals( - output, ['././fail.go', '././fail.py']) diff --git a/vendor/repo-infra/verify/boilerplate/test/BUILD.bazel b/vendor/repo-infra/verify/boilerplate/test/BUILD.bazel deleted file mode 100644 index ec18049a11..0000000000 --- a/vendor/repo-infra/verify/boilerplate/test/BUILD.bazel +++ /dev/null @@ -1,17 +0,0 @@ -package(default_visibility = ["//visibility:public"]) - -licenses(["notice"]) - -load( - "@io_bazel_rules_go//go:def.bzl", - "go_library", -) - -go_library( - name = "go_default_library", - srcs = [ - "fail.go", - "pass.go", - ], - importpath = "k8s.io/repo-infra/verify/boilerplate/test", -) diff --git a/vendor/repo-infra/verify/boilerplate/test/fail.go b/vendor/repo-infra/verify/boilerplate/test/fail.go deleted file mode 100644 index 16159c5ac0..0000000000 --- a/vendor/repo-infra/verify/boilerplate/test/fail.go +++ /dev/null @@ -1,19 +0,0 @@ -/* -Copyright 2014 The Kubernetes Authors. - -fail - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package test diff --git a/vendor/repo-infra/verify/boilerplate/test/fail.py b/vendor/repo-infra/verify/boilerplate/test/fail.py deleted file mode 100644 index cbdd06ff8a..0000000000 --- a/vendor/repo-infra/verify/boilerplate/test/fail.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 The Kubernetes Authors. -# -# failed -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/vendor/repo-infra/verify/boilerplate/test/pass.go b/vendor/repo-infra/verify/boilerplate/test/pass.go deleted file mode 100644 index 7508448aae..0000000000 --- a/vendor/repo-infra/verify/boilerplate/test/pass.go +++ /dev/null @@ -1,17 +0,0 @@ -/* -Copyright 2014 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package test diff --git a/vendor/repo-infra/verify/boilerplate/test/pass.py b/vendor/repo-infra/verify/boilerplate/test/pass.py deleted file mode 100644 index 5b7ce29a25..0000000000 --- a/vendor/repo-infra/verify/boilerplate/test/pass.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2015 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -True diff --git a/vendor/repo-infra/verify/go-tools/verify-gofmt.sh b/vendor/repo-infra/verify/go-tools/verify-gofmt.sh deleted file mode 100755 index 1f09fd2b10..0000000000 --- a/vendor/repo-infra/verify/go-tools/verify-gofmt.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -find_files() { - find . -not \( \ - \( \ - -wholename '*/vendor/*' \ - \) -prune \ - \) -name '*.go' -} - -GOFMT="gofmt -s" -bad_files=$(find_files | xargs $GOFMT -l) -if [[ -n "${bad_files}" ]]; then - echo "!!! '$GOFMT' needs to be run on the following files: " - echo "${bad_files}" - exit 1 -fi diff --git a/vendor/repo-infra/verify/go-tools/verify-gometalinter.sh b/vendor/repo-infra/verify/go-tools/verify-gometalinter.sh deleted file mode 100755 index fa6e6b54ad..0000000000 --- a/vendor/repo-infra/verify/go-tools/verify-gometalinter.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -gometalinter --deadline="${GOMETALINTER_DEADLINE:-180s}" --vendor \ - --cyclo-over=50 --dupl-threshold=100 \ - --exclude=".*should not use dot imports \(golint\)$" \ - --disable-all \ - --enable=vet \ - --enable=deadcode \ - --enable=golint \ - --enable=vetshadow \ - --enable=gocyclo \ - --skip=.git \ - --skip=.tool \ - --skip=vendor \ - --tests \ - ./... diff --git a/vendor/repo-infra/verify/go-tools/verify-govet.sh b/vendor/repo-infra/verify/go-tools/verify-govet.sh deleted file mode 100755 index 5f690bd31b..0000000000 --- a/vendor/repo-infra/verify/go-tools/verify-govet.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -go vet -v $(go list ./... | grep -v /vendor/) diff --git a/vendor/repo-infra/verify/go_install_from_commit.sh b/vendor/repo-infra/verify/go_install_from_commit.sh deleted file mode 100755 index ee6fd0d9c1..0000000000 --- a/vendor/repo-infra/verify/go_install_from_commit.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -PKG=$1 -COMMIT=$2 -export GOPATH=$3 -export GOBIN="$GOPATH/bin" - -go get -d -u "${PKG}" -cd "${GOPATH}/src/${PKG}" -git checkout -q "${COMMIT}" -go install "${PKG}" diff --git a/vendor/repo-infra/verify/update-bazel.sh b/vendor/repo-infra/verify/update-bazel.sh deleted file mode 100755 index 03ab547006..0000000000 --- a/vendor/repo-infra/verify/update-bazel.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2016 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -REPOINFRA_ROOT=$(git rev-parse --show-toplevel) -# https://github.com/kubernetes/test-infra/issues/5699#issuecomment-348350792 -cd ${REPOINFRA_ROOT} - -OUTPUT_GOBIN="${REPOINFRA_ROOT}/_output/bin" -GOBIN="${OUTPUT_GOBIN}" go install ./vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle -GOBIN="${OUTPUT_GOBIN}" go install ./kazel - -touch "${REPOINFRA_ROOT}/vendor/BUILD.bazel" - -"${OUTPUT_GOBIN}/gazelle" fix \ - -external=vendored \ - -mode=fix - -"${OUTPUT_GOBIN}/kazel" diff --git a/vendor/repo-infra/verify/verify-bazel.sh b/vendor/repo-infra/verify/verify-bazel.sh deleted file mode 100755 index 159a9815ab..0000000000 --- a/vendor/repo-infra/verify/verify-bazel.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2016 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -REPOINFRA_ROOT=$(git rev-parse --show-toplevel) -# https://github.com/kubernetes/test-infra/issues/5699#issuecomment-348350792 -cd ${REPOINFRA_ROOT} - -OUTPUT_GOBIN="${REPOINFRA_ROOT}/_output/bin" -GOBIN="${OUTPUT_GOBIN}" go install ./vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle -GOBIN="${OUTPUT_GOBIN}" go install ./kazel - -touch "${REPOINFRA_ROOT}/vendor/BUILD.bazel" - -gazelle_diff=$("${OUTPUT_GOBIN}/gazelle" fix \ - -external=vendored \ - -mode=diff) - -kazel_diff=$("${OUTPUT_GOBIN}/kazel" \ - -dry-run \ - -print-diff) - -if [[ -n "${gazelle_diff}" || -n "${kazel_diff}" ]]; then - echo "${gazelle_diff}" - echo "${kazel_diff}" - echo - echo "Run ./verify/update-bazel.sh" - exit 1 -fi diff --git a/vendor/repo-infra/verify/verify-boilerplate.sh b/vendor/repo-infra/verify/verify-boilerplate.sh deleted file mode 100755 index 0168d336eb..0000000000 --- a/vendor/repo-infra/verify/verify-boilerplate.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - -# Copyright 2014 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -o errexit -set -o nounset -set -o pipefail - -# This script is intended to be used via subtree in a top-level directory: -# / -# repo-infra/ -# verify/ - -REPO_ROOT=$(dirname "${BASH_SOURCE}")/../.. - -boilerDir="${REPO_ROOT}/repo-infra/verify/boilerplate" -boiler="${boilerDir}/boilerplate.py" - -files_need_boilerplate=($(${boiler} "$@")) - -# Run boilerplate.py unit tests -unitTestOut="$(mktemp)" -trap cleanup EXIT -cleanup() { - rm "${unitTestOut}" -} - -pushd "${boilerDir}" >/dev/null -if ! python -m unittest boilerplate_test 2>"${unitTestOut}"; then - echo "boilerplate_test.py failed" - echo - cat "${unitTestOut}" - exit 1 -fi -popd >/dev/null - -# Run boilerplate check -if [[ ${#files_need_boilerplate[@]} -gt 0 ]]; then - for file in "${files_need_boilerplate[@]}"; do - echo "Boilerplate header is wrong for: ${file}" - done - - exit 1 -fi diff --git a/vendor/repo-infra/verify/verify-crosstool.sh b/vendor/repo-infra/verify/verify-crosstool.sh deleted file mode 100755 index 66766c1efb..0000000000 --- a/vendor/repo-infra/verify/verify-crosstool.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2019 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -checked_in_crosstool=$1 -generated_crosstool=$2 - -diff=$(diff -u "${checked_in_crosstool}" "${generated_crosstool}" || true) - -if [[ -n "${diff}" ]]; then - echo "Checked-in CROSSTOOL does not match generated CROSSTOOL" - echo "${diff}" - echo - echo - echo "To fix: " - echo " cp $(realpath ${generated_crosstool}) $(realpath ${checked_in_crosstool})" - exit 1 -fi diff --git a/vendor/repo-infra/verify/verify-errexit.sh b/vendor/repo-infra/verify/verify-errexit.sh deleted file mode 100755 index a30f31c48c..0000000000 --- a/vendor/repo-infra/verify/verify-errexit.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This script will verify that the specified script files have -# "set -o errexit" turned on at some point. -# -# Usage: verify-errexit.sh [ dir | file ... ] -# default args is the root of our source tree - -set -o errexit -set -o nounset -set -o pipefail - -REPO_ROOT=$(dirname "${BASH_SOURCE}")/.. - -if [[ "$*" != "" ]]; then - args="$*" -else - args=$(ls "${REPO_ROOT}" | grep -v vendor | grep -v glide) -fi - -# Gather the list of files that appear to be shell scripts. -# Meaning they have some form of "#!...sh" as a line in them. 
-shFiles=$(grep -rl '^#!.*sh$' ${args}) - -rc="0" -for file in ${shFiles}; do - grep "set -o errexit" ${file} &> /dev/null && continue - grep "set -[a-z]*e" ${file} &> /dev/null && continue - - echo ${file}: appears to be missing \"set -o errexit\" - rc="1" -done - -exit ${rc} - diff --git a/vendor/repo-infra/verify/verify-go-src.sh b/vendor/repo-infra/verify/verify-go-src.sh deleted file mode 100755 index edf73dd7be..0000000000 --- a/vendor/repo-infra/verify/verify-go-src.sh +++ /dev/null @@ -1,111 +0,0 @@ -#!/bin/bash - -# Copyright 2017 The Kubernetes Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset -set -o pipefail - -# This script is intended to be used via subtree in a top-level directory: -# / -# repo-infra/ -# verify/ -# Or via vendoring and passing root directory as vendor/repo-infra/verify-*.sh --rootdir **full path to your repo dir** -# / -# vendor/ -# repo-infra/ -# ... -# - - -SILENT=true -REPO_ROOT=$(dirname "${BASH_SOURCE}")/../.. - -# Convert long opts to short ones to read through getopts -for arg in "$@"; do - shift - case "$arg" in - "--rootdir") set -- "$@" "-r";; - *) - set -- "$@" "$arg" - ;; - esac -done - -OPTIND=1 -while getopts "vr:" opt; do - case ${opt} in - v) - SILENT=false - ;; - r) - REPO_ROOT=${OPTARG} - ;; - \?) - echo "Invalid flag: -${OPTARG}" >&2 - exit 1 - ;; - esac -done - -shift "$(($OPTIND-1))" - -echo "Working directory: ${REPO_ROOT}" - -GO_TOOLS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/go-tools" - -function run-cmd { - if ${SILENT}; then - "$@" &> /dev/null - else - "$@" - fi -} - -# Some useful colors. -if [[ -z "${color_start-}" ]]; then - declare -r color_start="\033[" - declare -r color_red="${color_start}0;31m" - declare -r color_yellow="${color_start}0;33m" - declare -r color_green="${color_start}0;32m" - declare -r color_norm="${color_start}0m" -fi - -function run-checks { - local -r pattern=$1 - local -r runner=$2 - - for t in $(ls ${pattern}) - do - echo -e "Verifying ${t}" - local start=$(date +%s) - cd $REPO_ROOT && run-cmd "${runner}" "${t}" && tr=$? || tr=$? - local elapsed=$(($(date +%s) - ${start})) - if [[ ${tr} -eq 0 ]]; then - echo -e "${color_green}SUCCESS${color_norm} ${t}\t${elapsed}s" - else - echo -e "${color_red}FAILED${color_norm} ${t}\t${elapsed}s" - ret=1 - fi - done -} - -if ${SILENT} ; then - echo "Running in the silent mode, run with -v if you want to see script logs." 
-fi - -ret=0 -run-checks "${GO_TOOLS_DIR}/*.sh" bash -exit ${ret} From 6b046dfe3ec7b87c52e2f0530b905189f01c9c25 Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Fri, 3 Jul 2020 10:59:17 +0100 Subject: [PATCH 13/14] Squashed 'vendor/k8s.io/repo-infra/' content from commit 63918b096 git-subtree-dir: vendor/k8s.io/repo-infra git-subtree-split: 63918b0966c3f320f7af2a17d85d13f0170ba05c --- .bazelignore | 1 + .bazelrc | 55 + .bazelversion | 1 + .gitignore | 12 + .golangci.yml | 28 + .kazelcfg.json | 4 + BUILD.bazel | 82 + CONTRIBUTING.md | 9 + LICENSE | 201 ++ OWNERS | 8 + README.md | 65 + SECURITY_CONTACTS | 14 + WORKSPACE | 12 + cmd/kazel/BUILD.bazel | 55 + cmd/kazel/README.rst | 90 + cmd/kazel/config.go | 70 + cmd/kazel/diff.go | 60 + cmd/kazel/generator.go | 178 ++ cmd/kazel/generator_test.go | 122 + cmd/kazel/kazel.go | 402 ++++ cmd/kazel/kazel_test.go | 76 + cmd/kazel/sourcerer.go | 109 + code-of-conduct.md | 3 + defs/BUILD.bazel | 62 + defs/build.bzl | 200 ++ defs/deb.bzl | 66 + defs/diff_test.sh | 29 + defs/gcs_uploader.py | 92 + defs/go.bzl | 124 + defs/pkg.bzl | 34 + defs/rpm.bzl | 44 + defs/run_in_workspace.bzl | 91 + defs/testdata/testfile.txt | 1 + defs/testdata/testfile.txt.md5.expected | 1 + defs/testdata/testfile.txt.sha1.expected | 1 + defs/testdata/testfile.txt.sha512.expected | 1 + defs/testgen/BUILD.bazel | 28 + defs/testgen/main.go | 55 + defs/testpkg/BUILD.bazel | 38 + defs/testpkg/pkg.go | 27 + go.mod | 11 + go.sum | 575 +++++ hack/BUILD.bazel | 166 ++ hack/tools.go | 25 + hack/update-bazel.sh | 49 + hack/update-deps.sh | 92 + hack/update-gofmt.sh | 36 + hack/verify-bazel.sh | 64 + hack/verify-deps.sh | 75 + hack/verify-gofmt.sh | 43 + hack/verify-golangci-lint.sh | 49 + hack/verify_boilerplate.py | 261 +++ hack/verify_boilerplate_test.py | 62 + load.bzl | 115 + presubmit.sh | 32 + repos.bzl | 2004 +++++++++++++++++ tools/CROSSTOOL | 512 +++++ tools/build_tar/BUILD.bazel | 32 + tools/build_tar/buildtar.go | 611 +++++ verify/BUILD.bazel | 16 + verify/README.md | 52 + verify/boilerplate/BUILD.bazel | 29 + verify/boilerplate/boilerplate.Dockerfile.txt | 14 + verify/boilerplate/boilerplate.Makefile.txt | 14 + verify/boilerplate/boilerplate.bzl.txt | 14 + verify/boilerplate/boilerplate.go.txt | 16 + verify/boilerplate/boilerplate.py.txt | 14 + verify/boilerplate/boilerplate.sh.txt | 14 + verify/boilerplate/test/fail.go | 19 + verify/boilerplate/test/fail.py | 17 + verify/boilerplate/test/pass.go | 17 + verify/boilerplate/test/pass.py | 17 + verify/go_install_from_commit.sh | 28 + verify/verify-bazel.sh | 23 + verify/verify-boilerplate.sh | 24 + verify/verify-errexit.sh | 48 + verify/verify-go-src.sh | 22 + 77 files changed, 7763 insertions(+) create mode 100644 .bazelignore create mode 100644 .bazelrc create mode 100644 .bazelversion create mode 100644 .gitignore create mode 100644 .golangci.yml create mode 100644 .kazelcfg.json create mode 100644 BUILD.bazel create mode 100644 CONTRIBUTING.md create mode 100644 LICENSE create mode 100644 OWNERS create mode 100644 README.md create mode 100644 SECURITY_CONTACTS create mode 100644 WORKSPACE create mode 100644 cmd/kazel/BUILD.bazel create mode 100644 cmd/kazel/README.rst create mode 100644 cmd/kazel/config.go create mode 100644 cmd/kazel/diff.go create mode 100644 cmd/kazel/generator.go create mode 100644 cmd/kazel/generator_test.go create mode 100644 cmd/kazel/kazel.go create mode 100644 cmd/kazel/kazel_test.go create mode 100644 cmd/kazel/sourcerer.go create mode 100644 code-of-conduct.md create mode 100644 defs/BUILD.bazel 
create mode 100644 defs/build.bzl create mode 100644 defs/deb.bzl create mode 100755 defs/diff_test.sh create mode 100644 defs/gcs_uploader.py create mode 100644 defs/go.bzl create mode 100644 defs/pkg.bzl create mode 100644 defs/rpm.bzl create mode 100644 defs/run_in_workspace.bzl create mode 100644 defs/testdata/testfile.txt create mode 100644 defs/testdata/testfile.txt.md5.expected create mode 100644 defs/testdata/testfile.txt.sha1.expected create mode 100644 defs/testdata/testfile.txt.sha512.expected create mode 100644 defs/testgen/BUILD.bazel create mode 100644 defs/testgen/main.go create mode 100644 defs/testpkg/BUILD.bazel create mode 100644 defs/testpkg/pkg.go create mode 100644 go.mod create mode 100644 go.sum create mode 100644 hack/BUILD.bazel create mode 100644 hack/tools.go create mode 100755 hack/update-bazel.sh create mode 100755 hack/update-deps.sh create mode 100755 hack/update-gofmt.sh create mode 100755 hack/verify-bazel.sh create mode 100755 hack/verify-deps.sh create mode 100755 hack/verify-gofmt.sh create mode 100755 hack/verify-golangci-lint.sh create mode 100755 hack/verify_boilerplate.py create mode 100644 hack/verify_boilerplate_test.py create mode 100644 load.bzl create mode 100755 presubmit.sh create mode 100644 repos.bzl create mode 100755 tools/CROSSTOOL create mode 100644 tools/build_tar/BUILD.bazel create mode 100644 tools/build_tar/buildtar.go create mode 100644 verify/BUILD.bazel create mode 100644 verify/README.md create mode 100644 verify/boilerplate/BUILD.bazel create mode 100644 verify/boilerplate/boilerplate.Dockerfile.txt create mode 100644 verify/boilerplate/boilerplate.Makefile.txt create mode 100644 verify/boilerplate/boilerplate.bzl.txt create mode 100644 verify/boilerplate/boilerplate.go.txt create mode 100644 verify/boilerplate/boilerplate.py.txt create mode 100644 verify/boilerplate/boilerplate.sh.txt create mode 100644 verify/boilerplate/test/fail.go create mode 100644 verify/boilerplate/test/fail.py create mode 100644 verify/boilerplate/test/pass.go create mode 100644 verify/boilerplate/test/pass.py create mode 100755 verify/go_install_from_commit.sh create mode 100755 verify/verify-bazel.sh create mode 100755 verify/verify-boilerplate.sh create mode 100755 verify/verify-errexit.sh create mode 100755 verify/verify-go-src.sh diff --git a/.bazelignore b/.bazelignore new file mode 100644 index 0000000000..6b8710a711 --- /dev/null +++ b/.bazelignore @@ -0,0 +1 @@ +.git diff --git a/.bazelrc b/.bazelrc new file mode 100644 index 0000000000..ce59f93fdd --- /dev/null +++ b/.bazelrc @@ -0,0 +1,55 @@ +build --verbose_failures +test --test_output=errors + +# TODO(fejta): figure out why this causes problems +#test --features=race # enable data race detection + +# Note needs an instance name +# https://github.com/bazelbuild/bazel-toolchains/blob/master/bazelrc/bazel-0.27.0.bazelrc +build:remote --jobs=500 +build:remote --host_javabase=@rbe_default//java:jdk +build:remote --javabase=@rbe_default//java:jdk +build:remote --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8 +build:remote --java_toolchain=@bazel_tools//tools/jdk:toolchain_hostjdk8 +build:remote --crosstool_top=@rbe_default//cc:toolchain +build:remote --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1 + +build:remote --extra_toolchains=@rbe_default//config:cc-toolchain +build:remote --extra_execution_platforms=@io_k8s_repo_infra//:rbe_with_network +build:remote --host_platform=@io_k8s_repo_infra//:rbe_with_network +build:remote --platforms=@io_k8s_repo_infra//:rbe_with_network + 
+build:remote --define=EXECUTOR=remote +build:remote --remote_executor=grpcs://remotebuildexecution.googleapis.com +build:remote --remote_timeout=3600 + +# Alt: --google_credentials=some_file.json +build:remote --google_default_credentials=true + +# Improve cache hit rate +build:remote --incompatible_strict_action_env=true + +# Minimize what is downloaded +build:inmemory --experimental_inmemory_jdeps_files +build:inmemory --experimental_inmemory_dotd_files + +# Minimize what is downloaded +build:toplevel --config=inmemory +build:toplevel --experimental_remote_download_outputs=toplevel + +build:minimal --config=inmemory +build:minimal --experimental_remote_download_outputs=minimal + +build:remote --config=toplevel + +run:remote --experimental_remote_download_outputs=all --noexperimental_inmemory_jdeps_files --noexperimental_inmemory_dotd_files + +# Compose the remote configs with an instance name +# A couple examples below: + +# --config=ci-instance adds the instance name +build:ci-instance --remote_instance_name=projects/k8s-prow-builds/instances/default_instance + +# Config we want to use in ci +build:ci --config=remote --config=ci-instance +build:ci --noshow_progress diff --git a/.bazelversion b/.bazelversion new file mode 100644 index 0000000000..ccbccc3dc6 --- /dev/null +++ b/.bazelversion @@ -0,0 +1 @@ +2.2.0 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..820ec5cdbb --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +/bazel-* +/_output + +# Vim-related files +[._]*.s[a-w][a-z] +[._]s[a-w][a-z] +*.un~ diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000000..f93130353a --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,28 @@ +run: + deadline: 180s + tests: true + skip-dirs: + - .git + - .tool + - vendor + - verify + +linters-settings: + dupl: + threshold: 100 + gocyclo: + min-complexity: 50 + +linters: + enable: + - govet + - deadcode + - golint + - gocyclo + disable-all: true + +issues: + exclude-rules: + - linters: + - golint + text: ".*should not use dot dot imports" diff --git a/.kazelcfg.json b/.kazelcfg.json new file mode 100644 index 0000000000..49de0ce362 --- /dev/null +++ b/.kazelcfg.json @@ -0,0 +1,4 @@ +{ + "GoPrefix": "k8s.io/repo-infra", + "AddSourcesRules": true +} diff --git a/BUILD.bazel b/BUILD.bazel new file mode 100644 index 0000000000..52340ba6ba --- /dev/null +++ b/BUILD.bazel @@ -0,0 +1,82 @@ +# gazelle:prefix k8s.io/repo-infra +# gazelle:exclude hack +# gazelle:proto disable + +# Use the Bazel-vendored protobuf library since we use go_proto_library +# gazelle:resolve go github.com/golang/protobuf/proto @com_github_golang_protobuf//proto:go_default_library + +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) + +load("//defs:run_in_workspace.bzl", "workspace_binary") + +workspace_binary( + name = "gofmt", + cmd = "@go_sdk//:bin/gofmt", +) + +workspace_binary( + name = "go", + cmd = "@go_sdk//:bin/go", +) + +workspace_binary( + name = "kazel", + cmd = "//cmd/kazel", +) + +workspace_binary( + name = "golangci-lint", + cmd = "@com_github_golangci_golangci_lint//cmd/golangci-lint", +) + +workspace_binary( + name = "buildifier", + cmd = "@com_github_bazelbuild_buildtools//buildifier", +) + +load("@bazel_gazelle//:def.bzl", "gazelle") + +gazelle(name = "gazelle") + +exports_files([".kazelcfg.json"]) + +filegroup( + name = "package-srcs", + srcs = glob( + ["**"], + exclude = [ + "bazel-*/**", + ".git/**", + ], + ), + tags 
= ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [ + ":package-srcs", + "//cmd/kazel:all-srcs", + "//defs:all-srcs", + "//hack:all-srcs", + "//tools/build_tar:all-srcs", + "//verify:all-srcs", + ], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) + +platform( + name = "rbe_with_network", + parents = ["@rbe_default//config:platform"], + remote_execution_properties = """ + properties: { + name: "dockerNetwork" + value: "standard" + } + {PARENT_REMOTE_EXECUTION_PROPERTIES} + """, +) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..ef37eb0b61 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,9 @@ +# Contributing + +Thanks for taking the time to join our community and start contributing! + +The [Contributor Guide](https://github.com/kubernetes/community/blob/master/contributors/guide/README.md) +provides detailed instructions on how to get your ideas and bug fixes seen and accepted. + +Please remember to sign the [CNCF CLA](https://github.com/kubernetes/community/blob/master/CLA.md) and +read and observe the [Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md). diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..8dada3edaf --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/OWNERS b/OWNERS new file mode 100644 index 0000000000..352371c646 --- /dev/null +++ b/OWNERS @@ -0,0 +1,8 @@ +approvers: +- BenTheElder +- clarketm +- fejta +- mikedanese + +emeritus_approvers: +- ixdy diff --git a/README.md b/README.md new file mode 100644 index 0000000000..9bc8e1683f --- /dev/null +++ b/README.md @@ -0,0 +1,65 @@ +# Kubernetes repository infrastructure +[![Build Status](https://travis-ci.org/kubernetes/repo-infra.svg?branch=master)](https://travis-ci.org/kubernetes/repo-infra) [![Go Report Card](https://goreportcard.com/badge/github.com/kubernetes/repo-infra)](https://goreportcard.com/report/github.com/kubernetes/repo-infra) + +This repository contains repository infrastructure tools for use in +`kubernetes` and `kubernetes-incubator` repositories. 
Examples: + +- Boilerplate verification +- Go source code quality verification +- Golang build infrastructure + +--- + +## Using this repository + +This repository can be used via some golang "vendoring" mechanism +(such as glide), or it can be used via +[git subtree](http://git.kernel.org/cgit/git/git.git/plain/contrib/subtree/git-subtree.txt). + +### Using "vendoring" + +The exact mechanism to pull in this repository will vary depending on +the tool you use. However, unless you end up having this repository +at the root of your project's repository you will probably need to +make sure you use the `--rootdir` command line parameter to let the +`verify-boilerplate.sh` know its location, eg: + + verify-boilerplate.sh --rootdir=/home/myrepo + +### Using `git subtree` + +When using the git subtree mechanism, this repository should be placed in the +top level of your project. + +To add `repo-infra` to your repository, use the following commands from the +root directory of **your** repository. + +First, add a git remote for the `repo-infra` repository: + +``` +$ git remote add repo-infra git://github.com/kubernetes/repo-infra +``` + +This is not strictly necessary, but reduces the typing required for subsequent +commands. + +Next, use `git subtree add` to create a new subtree in the `repo-infra` +directory within your project: + +``` +$ git subtree add -P repo-infra repo-infra master --squash +``` + +After this command, you will have: + +1. A `repo-infra` directory in your project containing the content of **this** + project +2. 2 new commits in the active branch: + 1. A commit that squashes the git history of the `repo-infra` project + 2. A merge commit whose ancestors are: + 1. The `HEAD` of the branch prior to when you ran `git subtree add` + 2. The commit containing the squashed `repo-infra` commits + +### Contributing + +Please see [CONTRIBUTING.md](CONTRIBUTING.md) for instructions on how to contribute. diff --git a/SECURITY_CONTACTS b/SECURITY_CONTACTS new file mode 100644 index 0000000000..f5fb5c9414 --- /dev/null +++ b/SECURITY_CONTACTS @@ -0,0 +1,14 @@ +# Defined below are the security contacts for this repo. +# +# They are the contact point for the Product Security Committee to reach out +# to for triaging and handling of incoming issues. +# +# The below names agree to abide by the +# [Embargo Policy](https://git.k8s.io/security/private-distributors-list.md#embargo-policy) +# and will be removed and replaced if they violate that agreement. 
+# +# DO NOT REPORT SECURITY VULNERABILITIES DIRECTLY TO THESE NAMES, FOLLOW THE +# INSTRUCTIONS AT https://kubernetes.io/security/ + +ixdy +mikedanese diff --git a/WORKSPACE b/WORKSPACE new file mode 100644 index 0000000000..41600da030 --- /dev/null +++ b/WORKSPACE @@ -0,0 +1,12 @@ +# gazelle:repository_macro repos.bzl%go_repositories +workspace(name = "io_k8s_repo_infra") + +load("//:load.bzl", "repositories") + +repositories() + +load("//:repos.bzl", "configure", "repo_infra_go_repositories") + +configure() + +repo_infra_go_repositories() diff --git a/cmd/kazel/BUILD.bazel b/cmd/kazel/BUILD.bazel new file mode 100644 index 0000000000..2f9c6bc475 --- /dev/null +++ b/cmd/kazel/BUILD.bazel @@ -0,0 +1,55 @@ +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) + +load( + "@io_bazel_rules_go//go:def.bzl", + "go_binary", + "go_library", + "go_test", +) + +go_binary( + name = "kazel", + embed = [":go_default_library"], +) + +go_library( + name = "go_default_library", + srcs = [ + "config.go", + "diff.go", + "generator.go", + "kazel.go", + "sourcerer.go", + ], + importpath = "k8s.io/repo-infra/cmd/kazel", + deps = [ + "@com_github_bazelbuild_buildtools//build:go_default_library", + "@io_k8s_klog_v2//:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = [ + "generator_test.go", + "kazel_test.go", + ], + embed = [":go_default_library"], + deps = ["@com_github_bazelbuild_buildtools//build:go_default_library"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/cmd/kazel/README.rst b/cmd/kazel/README.rst new file mode 100644 index 0000000000..2cc96aee7c --- /dev/null +++ b/cmd/kazel/README.rst @@ -0,0 +1,90 @@ +kazel - a BUILD file generator for go and bazel +=============================================== + +Requirements: +############# + +* Your project must be somewhat compatible with go tool because + kazel uses go tool to parse your import tree. +* You must have a **GOPATH** and **GOROOT** setup and your project must + be in the correct location in your **GOPATH**. +* Your ``./vendor`` directory may not contain ``BUILD`` files. + +Usage: +###### + +1. Get kazel by running ``go get k8s.io/repo-infra/kazel``. + +2. Create a ``.kazelcfg.json`` in the root of the repository. For the + kazel repository, the ``.kazelcfg.json`` would look like: + + .. code-block:: json + + { + "GoPrefix": "k8s.io/repo-infra", + "SrcDirs": [ + "./kazel" + ], + "SkippedPaths": [ + ".*foobar(baz)?.*$" + ] + } + +3. Run kazel: + + .. code-block:: bash + + $ kazel -root=$GOPATH/src/k8s.io/repo-infra + +Defaults: +######### + +* **SrcDirs** in ``.kazelcfg.json`` defaults to ``["./"]`` +* ``-root`` option defaults to the current working directory + +Automanagement: +############### + +kazel reconciles rules that have the "**automanaged**" tag. If +you no longer want kazel to manage a rule, you can remove the +**automanaged** tag and kazel will no longer manage that rule. + +kazel only manages srcs, deps, and library attributes of a +rule after initial creation so you can add and managed other +attributes like data and copts and kazel will respect your +changes. + +kazel automatically formats all ``BUILD`` files in your repository +except for those matching **SkippedPaths**. 
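+
+For example, the ``package-srcs`` rule that kazel generates (described in
+the next section) is a typical automanaged rule; the snippet below is just
+an illustration of its shape:
+
+.. code-block:: python
+
+   filegroup(
+       name = "package-srcs",
+       srcs = glob(["**"]),  # reconciled by kazel on every run
+       tags = ["automanaged"],  # marks the rule as managed by kazel
+       visibility = ["//visibility:private"],
+   )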
+ +Adding "sources" rules: +####################### + +If you set "**AddSourcesRules**": ``true`` in your ``.kazelcfg.json``, +kazel will create "**package-srcs**" and "**all-srcs**" rules in every +package. + +The "**package-srcs**" rule is a glob matching all files in the +package recursively, but not any files owned by packages in +subdirectories. + +The "**all-srcs**" rule includes both the "**package-srcs**" rule and +the "**all-srcs**" rules of all subpackages; i.e. **//:all-srcs** will +include all files in your repository. + +The "**package-srcs**" rule defaults to private visibility, +since it is safer to depend on the "**all-srcs**" rule: if a +subpackage is added, the "**package-srcs**" rule will no longer +include those files. + +You can remove the "**automanaged**" tag from the "**package-srcs**" +rule if you need to modify the glob (such as adding excludes). +It's recommended that you leave the "**all-srcs**" rule +automanaged. + +Validating BUILD files in CI: +############################# + +If you run kazel with ``--validate``, it will not update any ``BUILD`` files, but it +will exit nonzero if any ``BUILD`` files are out-of-date. You can add ``--print-diff`` +to print out the changes needed. diff --git a/cmd/kazel/config.go b/cmd/kazel/config.go new file mode 100644 index 0000000000..3d2363196c --- /dev/null +++ b/cmd/kazel/config.go @@ -0,0 +1,70 @@ +/* +Copyright 2017 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "encoding/json" + "io/ioutil" +) + +// Cfg defines the configuration options for kazel. +type Cfg struct { + GoPrefix string + // evaluated recursively, defaults to ["."] + SrcDirs []string + // regexps that match packages to skip + SkippedPaths []string + // regexps that match packages to skip for k8s codegen. + // note that this skips anything matched by SkippedPaths as well. + SkippedK8sCodegenPaths []string + // whether to add "pkg-srcs" and "all-srcs" filegroups + // note that this operates on the entire tree (not just SrcsDirs) but skips anything matching SkippedPaths + AddSourcesRules bool + // whether to have multiple build files in vendor/ or just one. + VendorMultipleBuildFiles bool + // Whether to manage the upstream Go rules provided by bazelbuild/rules_go. + // If using gazelle, set this to false (or omit). + ManageGoRules bool + // If defined, metadata parsed from "+k8s:" codegen build tags will be saved into this file. + K8sCodegenBzlFile string + // If defined, contains the boilerplate text to be included in the header of the generated bzl file. + K8sCodegenBoilerplateFile string + // Which tags to include in the codegen bzl file. + // Include only the name of the tag. + // For example, to include +k8s:foo=bar, list "foo" here. + K8sCodegenTags []string +} + +// ReadCfg reads and unmarshals the specified json file into a Cfg struct. 
+func ReadCfg(cfgPath string) (*Cfg, error) { + b, err := ioutil.ReadFile(cfgPath) + if err != nil { + return nil, err + } + var cfg Cfg + if err := json.Unmarshal(b, &cfg); err != nil { + return nil, err + } + defaultCfg(&cfg) + return &cfg, nil +} + +func defaultCfg(c *Cfg) { + if len(c.SrcDirs) == 0 { + c.SrcDirs = []string{"."} + } +} diff --git a/cmd/kazel/diff.go b/cmd/kazel/diff.go new file mode 100644 index 0000000000..37bed9381c --- /dev/null +++ b/cmd/kazel/diff.go @@ -0,0 +1,60 @@ +/* +Copyright 2017 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "io/ioutil" + "os" + "os/exec" +) + +// Diff prints the unified diff of the two provided byte slices +// using the unix diff command. +func Diff(left, right []byte) error { + lf, err := ioutil.TempFile("/tmp", "actual-file-") + if err != nil { + return err + } + defer lf.Close() + defer os.Remove(lf.Name()) + + rf, err := ioutil.TempFile("/tmp", "expected-file-") + if err != nil { + return err + } + defer rf.Close() + defer os.Remove(rf.Name()) + + _, err = lf.Write(left) + if err != nil { + return err + } + lf.Close() + + _, err = rf.Write(right) + if err != nil { + return err + } + rf.Close() + + cmd := exec.Command("/usr/bin/diff", "-u", lf.Name(), rf.Name()) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Run() + + return nil +} diff --git a/cmd/kazel/generator.go b/cmd/kazel/generator.go new file mode 100644 index 0000000000..2cb8c0fc7e --- /dev/null +++ b/cmd/kazel/generator.go @@ -0,0 +1,178 @@ +/* +Copyright 2017 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "io/ioutil" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + + "github.com/bazelbuild/buildtools/build" +) + +var ( + // Generator tags are specified using the format "// +k8s:name=value" + genTagRe = regexp.MustCompile(`//\s*\+k8s:([^\s=]+)(?:=(\S+))\s*\n`) +) + +// {tagName: {value: {pkgs}}} or {tagName: {pkg: {values}}} +type generatorTagsMap map[string]map[string]map[string]bool + +// extractTags finds k8s codegen tags found in b listed in requestedTags. +// It returns a map of {tag name: slice of values for that tag}. 
+func extractTags(b []byte, requestedTags map[string]bool) map[string][]string { + tags := make(map[string][]string) + matches := genTagRe.FindAllSubmatch(b, -1) + for _, m := range matches { + if len(m) >= 3 { + tag, values := string(m[1]), string(m[2]) + if _, requested := requestedTags[tag]; !requested { + continue + } + tags[tag] = append(tags[tag], strings.Split(values, ",")...) + } + } + return tags +} + +// findGeneratorTags searches for all packages under root that include a kubernetes generator +// tag comment. It does not follow symlinks, and any path in the configured skippedPaths +// or codegen skipped paths is skipped. +func (v *Vendorer) findGeneratorTags(root string, requestedTags map[string]bool) (tagsValuesPkgs, tagsPkgsValues generatorTagsMap, err error) { + tagsValuesPkgs = make(generatorTagsMap) + tagsPkgsValues = make(generatorTagsMap) + + err = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + pkg := filepath.Dir(path) + + for _, r := range v.skippedK8sCodegenPaths { + if r.MatchString(pkg) { + return filepath.SkipDir + } + } + + if !strings.HasSuffix(path, ".go") || strings.HasSuffix(path, "_test.go") { + return nil + } + + b, err := ioutil.ReadFile(path) + if err != nil { + return err + } + + for tag, values := range extractTags(b, requestedTags) { + if _, present := tagsValuesPkgs[tag]; !present { + tagsValuesPkgs[tag] = make(map[string]map[string]bool) + } + if _, present := tagsPkgsValues[tag]; !present { + tagsPkgsValues[tag] = make(map[string]map[string]bool) + } + if _, present := tagsPkgsValues[tag][pkg]; !present { + tagsPkgsValues[tag][pkg] = make(map[string]bool) + } + for _, v := range values { + if _, present := tagsValuesPkgs[tag][v]; !present { + tagsValuesPkgs[tag][v] = make(map[string]bool) + } + // Since multiple files in the same package may list a given tag/value, use a set to deduplicate. + tagsValuesPkgs[tag][v][pkg] = true + tagsPkgsValues[tag][pkg][v] = true + } + } + + return nil + }) + + if err != nil { + return nil, nil, err + } + + return +} + +// flattened returns a copy of the map with the final stringSet flattened into a sorted slice. +func flattened(m generatorTagsMap) map[string]map[string][]string { + flattened := make(map[string]map[string][]string) + for tag, subMap := range m { + flattened[tag] = make(map[string][]string) + for k, subSet := range subMap { + for v := range subSet { + flattened[tag][k] = append(flattened[tag][k], v) + } + sort.Strings(flattened[tag][k]) + } + } + return flattened +} + +// walkGenerated generates a k8s codegen bzl file that can be parsed by Starlark +// rules and macros to find packages needed k8s code generation. +// This involves reading all non-test go sources in the tree and looking for +// "+k8s:name=value" tags. Only those tags listed in K8sCodegenTags will be +// included. +// If a K8sCodegenBoilerplateFile was configured, the contents of this file +// will be included as the header of the generated bzl file. +// Returns true if there are diffs against the existing generated bzl file. 
+func (v *Vendorer) walkGenerated() (bool, error) { + if v.cfg.K8sCodegenBzlFile == "" { + return false, nil + } + // only include the specified tags + requestedTags := make(map[string]bool) + for _, tag := range v.cfg.K8sCodegenTags { + requestedTags[tag] = true + } + tagsValuesPkgs, tagsPkgsValues, err := v.findGeneratorTags(".", requestedTags) + if err != nil { + return false, err + } + + f := &build.File{ + Path: v.cfg.K8sCodegenBzlFile, + } + addCommentBefore(f, "#################################################") + addCommentBefore(f, "# # # # # # # # # # # # # # # # # # # # # # # # #") + addCommentBefore(f, "This file is autogenerated by kazel. DO NOT EDIT.") + addCommentBefore(f, "# # # # # # # # # # # # # # # # # # # # # # # # #") + addCommentBefore(f, "#################################################") + addCommentBefore(f, "") + + f.Stmt = append(f.Stmt, varExpr("go_prefix", "The go prefix passed to kazel", v.cfg.GoPrefix)) + f.Stmt = append(f.Stmt, varExpr("kazel_configured_tags", "The list of codegen tags kazel is configured to find", v.cfg.K8sCodegenTags)) + f.Stmt = append(f.Stmt, varExpr("tags_values_pkgs", "tags_values_pkgs is a dictionary mapping {k8s build tag: {tag value: [pkgs including that tag:value]}}", flattened(tagsValuesPkgs))) + f.Stmt = append(f.Stmt, varExpr("tags_pkgs_values", "tags_pkgs_values is a dictionary mapping {k8s build tag: {pkg: [tag values in pkg]}}", flattened(tagsPkgsValues))) + + var boilerplate []byte + if v.cfg.K8sCodegenBoilerplateFile != "" { + boilerplate, err = ioutil.ReadFile(v.cfg.K8sCodegenBoilerplateFile) + if err != nil { + return false, err + } + } + // Open existing file to use in diff mode. + _, err = os.Stat(f.Path) + if err != nil && !os.IsNotExist(err) { + return false, err + } + return writeFile(f.Path, f, boilerplate, !os.IsNotExist(err), v.dryRun) +} diff --git a/cmd/kazel/generator_test.go b/cmd/kazel/generator_test.go new file mode 100644 index 0000000000..b5653e6ce4 --- /dev/null +++ b/cmd/kazel/generator_test.go @@ -0,0 +1,122 @@ +/* +Copyright 2018 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "reflect" + "testing" +) + +func TestExtractTags(t *testing.T) { + requestedTags := map[string]bool{ + "foo-gen": true, + "baz-gen": true, + "quux-gen:with-extra@things": true, + } + var testCases = []struct { + src string + want map[string][]string + }{ + { + src: "// +k8s:foo-gen=a,b\n", + want: map[string][]string{"foo-gen": {"a", "b"}}, + }, + { + src: "// +k8s:bar-gen=a,b\n", + want: map[string][]string{}, + }, + { + src: "// +k8s:quux-gen=true\n", + want: map[string][]string{}, + }, + { + src: "// +k8s:quux-gen:with-extra@things=123\n", + want: map[string][]string{"quux-gen:with-extra@things": {"123"}}, + }, + { + src: `/* +This is a header. 
+*/ +// +k8s:foo-gen=first +// +k8s:bar-gen=true +// +build linux + +// +k8s:baz-gen=1,2,a +// +k8s:baz-gen=b + +// k8s:foo-gen=not-this-one +// commenting out this one too +k8s:foo-gen=disabled +// +k8s:foo-gen=ignore this one too + +// Let's repeat one! +// +k8s:baz-gen=b +// +k8s:foo-gen=last + +import "some package" +`, + want: map[string][]string{ + "foo-gen": {"first", "last"}, + "baz-gen": {"1", "2", "a", "b", "b"}, + }, + }, + } + + for _, testCase := range testCases { + result := extractTags([]byte(testCase.src), requestedTags) + if !reflect.DeepEqual(result, testCase.want) { + t.Errorf("extractTags(%v) = %v; want %v", testCase.src, result, testCase.want) + } + } +} + +func TestFlattened(t *testing.T) { + m := generatorTagsMap{ + "foo-gen": { + "a": { + "pkg/one": true, + "pkg/two": true, + }, + }, + "bar-gen": { + "true": { + "pkg/one": true, + "pkg/three": true, + // also test sorting - this should end up at the front of the slice + "a/pkg": true, + }, + "false": { + "pkg/one": true, + }, + }, + } + + want := map[string]map[string][]string{ + "foo-gen": { + "a": {"pkg/one", "pkg/two"}, + }, + "bar-gen": { + "true": {"a/pkg", "pkg/one", "pkg/three"}, + "false": {"pkg/one"}, + }, + } + + result := flattened(m) + if !reflect.DeepEqual(result, want) { + t.Errorf("flattened(%v) = %v; want %v", m, result, want) + } + +} diff --git a/cmd/kazel/kazel.go b/cmd/kazel/kazel.go new file mode 100644 index 0000000000..644c3dfb37 --- /dev/null +++ b/cmd/kazel/kazel.go @@ -0,0 +1,402 @@ +/* +Copyright 2017 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package main + +import ( + "bytes" + "flag" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "reflect" + "regexp" + "sort" + + "github.com/bazelbuild/buildtools/build" + + "k8s.io/klog/v2" +) + +const ( + automanagedTag = "automanaged" +) + +var ( + root = flag.String("root", ".", "root of go source") + dryRun = flag.Bool("dry-run", false, "run in dry mode") + printDiff = flag.Bool("print-diff", false, "print diff to stdout") + validate = flag.Bool("validate", false, "run in dry mode and exit nonzero if any BUILD files need to be updated") + cfgPath = flag.String("cfg-path", ".kazelcfg.json", "path to kazel config (relative paths interpreted relative to -repo.") +) + +func main() { + flag.Parse() + flag.Set("alsologtostderr", "true") + if *root == "" { + klog.Fatalf("-root argument is required") + } + if *validate { + *dryRun = true + } + v, err := newVendorer(*root, *cfgPath, *dryRun) + if err != nil { + klog.Fatalf("unable to build vendorer: %v", err) + } + if err = os.Chdir(v.root); err != nil { + klog.Fatalf("cannot chdir into root %q: %v", v.root, err) + } + if v.cfg.ManageGoRules { + klog.Fatalf("kazel no longer supports managing Go rules") + } + + wroteGenerated := false + if wroteGenerated, err = v.walkGenerated(); err != nil { + klog.Fatalf("err walking generated: %v", err) + } + if _, err = v.walkSource("."); err != nil { + klog.Fatalf("err walking source: %v", err) + } + written := 0 + if written, err = v.reconcileAllRules(); err != nil { + klog.Fatalf("err reconciling rules: %v", err) + } + if wroteGenerated { + written++ + } + if *validate && written > 0 { + fmt.Fprintf(os.Stderr, "\n%d BUILD files not up-to-date.\n", written) + os.Exit(1) + } +} + +// Vendorer collects context, configuration, and cache while walking the tree. +type Vendorer struct { + skippedPaths []*regexp.Regexp + skippedK8sCodegenPaths []*regexp.Regexp + dryRun bool + root string + cfg *Cfg + newRules map[string][]*build.Rule // package path -> list of rules to add or update + managedAttrs []string // which rule attributes kazel will overwrite +} + +func newVendorer(root, cfgPath string, dryRun bool) (*Vendorer, error) { + absRoot, err := filepath.Abs(root) + if err != nil { + return nil, fmt.Errorf("could not get absolute path: %v", err) + } + if !filepath.IsAbs(cfgPath) { + cfgPath = filepath.Join(absRoot, cfgPath) + } + cfg, err := ReadCfg(cfgPath) + if err != nil { + return nil, err + } + + v := Vendorer{ + dryRun: dryRun, + root: absRoot, + cfg: cfg, + newRules: make(map[string][]*build.Rule), + managedAttrs: []string{"srcs"}, + } + + builtIn, err := compileSkippedPaths([]string{"^\\.git", "^bazel-*"}) + if err != nil { + return nil, err + } + + sp, err := compileSkippedPaths(cfg.SkippedPaths) + if err != nil { + return nil, err + } + sp = append(builtIn, sp...) + v.skippedPaths = sp + + sop, err := compileSkippedPaths(cfg.SkippedK8sCodegenPaths) + if err != nil { + return nil, err + } + v.skippedK8sCodegenPaths = append(sop, sp...) + + return &v, nil + +} + +func writeRules(file *build.File, rules []*build.Rule) { + for _, rule := range rules { + file.Stmt = append(file.Stmt, rule.Call) + } +} + +func (v *Vendorer) addRules(pkgPath string, rules []*build.Rule) { + cleanPath := filepath.Clean(pkgPath) + v.newRules[cleanPath] = append(v.newRules[cleanPath], rules...) 
+} + +func (v *Vendorer) reconcileAllRules() (int, error) { + var paths []string + for path := range v.newRules { + paths = append(paths, path) + } + sort.Strings(paths) + written := 0 + for _, path := range paths { + w, err := ReconcileRules(path, v.newRules[path], v.managedAttrs, v.dryRun) + if w { + written++ + } + if err != nil { + return written, err + } + } + return written, nil +} + +// addCommentBefore adds a whole-line comment before the provided Expr. +func addCommentBefore(e build.Expr, comment string) { + c := e.Comment() + c.Before = append(c.Before, build.Comment{Token: fmt.Sprintf("# %s", comment)}) +} + +// varExpr creates a variable expression of the form "name = expr". +// v will be converted into an appropriate Expr using asExpr. +// The optional description will be included as a comment before the expression. +func varExpr(name, desc string, v interface{}) build.Expr { + e := &build.BinaryExpr{ + X: &build.LiteralExpr{Token: name}, + Op: "=", + Y: asExpr(v), + } + if desc != "" { + addCommentBefore(e, desc) + } + return e +} + +// rvSliceLessFunc returns a function that can be used with sort.Slice() or sort.SliceStable() +// to sort a slice of reflect.Values. +// It sorts ints and floats as their native kinds, and everything else as a string. +func rvSliceLessFunc(k reflect.Kind, vs []reflect.Value) func(int, int) bool { + switch k { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return func(i, j int) bool { return vs[i].Int() < vs[j].Int() } + case reflect.Float32, reflect.Float64: + return func(i, j int) bool { return vs[i].Float() < vs[j].Float() } + default: + return func(i, j int) bool { + return fmt.Sprintf("%v", vs[i]) < fmt.Sprintf("%v", vs[j]) + } + } +} + +// asExpr converts a native Go type into the equivalent Starlark expression using reflection. +// The keys of maps will be sorted for reproducibility. 
+func asExpr(e interface{}) build.Expr { + rv := reflect.ValueOf(e) + switch rv.Kind() { + case reflect.Bool: + return &build.LiteralExpr{Token: fmt.Sprintf("%t", e)} + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return &build.LiteralExpr{Token: fmt.Sprintf("%d", e)} + case reflect.Float32, reflect.Float64: + return &build.LiteralExpr{Token: fmt.Sprintf("%g", e)} + case reflect.String: + return &build.StringExpr{Value: e.(string)} + case reflect.Slice, reflect.Array: + var list []build.Expr + for i := 0; i < rv.Len(); i++ { + list = append(list, asExpr(rv.Index(i).Interface())) + } + return &build.ListExpr{List: list} + case reflect.Map: + var list []build.Expr + keys := rv.MapKeys() + sort.SliceStable(keys, rvSliceLessFunc(rv.Type().Key().Kind(), keys)) + for _, key := range keys { + list = append(list, &build.KeyValueExpr{ + Key: asExpr(key.Interface()), + Value: asExpr(rv.MapIndex(key).Interface()), + }) + } + return &build.DictExpr{List: list} + default: + klog.Fatalf("unhandled kind: %q for value: %q", rv.Kind(), rv) + return nil + } +} + +func newRule(rt, name string, attrs map[string]build.Expr) *build.Rule { + rule := &build.Rule{ + Call: &build.CallExpr{ + X: &build.LiteralExpr{Token: rt}, + }, + } + rule.SetAttr("name", asExpr(name)) + for k, v := range attrs { + rule.SetAttr(k, v) + } + rule.SetAttr("tags", asExpr([]string{automanagedTag})) + return rule +} + +// findBuildFile determines the name of a preexisting BUILD file, returning +// a default if no such file exists. +func findBuildFile(pkgPath string) (bool, string) { + options := []string{"BUILD.bazel", "BUILD"} + for _, b := range options { + path := filepath.Join(pkgPath, b) + info, err := os.Stat(path) + if err == nil && !info.IsDir() { + return true, path + } + } + return false, filepath.Join(pkgPath, "BUILD.bazel") +} + +// ReconcileRules reconciles, simplifies, and writes the rules for the specified package, adding +// additional dependency rules as needed. +func ReconcileRules(pkgPath string, rules []*build.Rule, managedAttrs []string, dryRun bool) (bool, error) { + _, path := findBuildFile(pkgPath) + info, err := os.Stat(path) + if err != nil && os.IsNotExist(err) { + f := &build.File{} + writeRules(f, rules) + return writeFile(path, f, nil, false, dryRun) + } else if err != nil { + return false, err + } + if info.IsDir() { + return false, fmt.Errorf("%q cannot be a directory", path) + } + b, err := ioutil.ReadFile(path) + if err != nil { + return false, err + } + f, err := build.Parse(path, b) + if err != nil { + return false, err + } + oldRules := make(map[string]*build.Rule) + for _, r := range f.Rules("") { + oldRules[r.Name()] = r + } + for _, r := range rules { + o, ok := oldRules[r.Name()] + if !ok { + f.Stmt = append(f.Stmt, r.Call) + continue + } + if !RuleIsManaged(o) { + continue + } + reconcileAttr := func(o, n *build.Rule, name string) { + if e := n.Attr(name); e != nil { + o.SetAttr(name, e) + } else { + o.DelAttr(name) + } + } + for _, attr := range managedAttrs { + reconcileAttr(o, r, attr) + } + delete(oldRules, r.Name()) + } + + for _, r := range oldRules { + if !RuleIsManaged(r) { + continue + } + f.DelRules(r.Kind(), r.Name()) + } + + return writeFile(path, f, nil, true, dryRun) +} + +// RuleIsManaged returns whether the provided rule is managed by this tool, +// based on the tags set on the rule. 
+func RuleIsManaged(r *build.Rule) bool { + for _, tag := range r.AttrStrings("tags") { + if tag == automanagedTag { + return true + } + } + return false +} + +// writeFile writes out f to path, prepending boilerplate to the output. +// If exists is true, compares against the existing file specified by path, +// returning false if there are no changes. +// Otherwise, returns true. +// If dryRun is false, no files are actually changed; otherwise, the file will be written. +func writeFile(path string, f *build.File, boilerplate []byte, exists, dryRun bool) (bool, error) { + var info build.RewriteInfo + build.Rewrite(f, &info) + var out []byte + out = append(out, boilerplate...) + // double format the source file as our modification logic sometimes uses + // LiteralExpr where it should use other types of expressions, and this + // prevents issues where kazel thus formats structures incorrectly. + // :this_is_fine: + outData := build.Format(f) + var err error + f, err = build.Parse(path, outData) + if err != nil { + return false, fmt.Errorf("internal error occurred formatting file: %v", err) + } + // also call Rewrite again to run Buildifier against the results as + // visibility rules are not ordered correctly for some reason + build.Rewrite(f, &info) + out = append(out, build.Format(f)...) + if exists { + orig, err := ioutil.ReadFile(path) + if err != nil { + return false, err + } + if bytes.Compare(orig, out) == 0 { + return false, nil + } + if *printDiff { + Diff(orig, out) + } + } + if dryRun { + fmt.Fprintf(os.Stderr, "DRY-RUN: wrote %q\n", path) + return true, nil + } + werr := ioutil.WriteFile(path, out, 0644) + if werr == nil { + fmt.Fprintf(os.Stderr, "wrote %q\n", path) + } + return werr == nil, werr +} + +func compileSkippedPaths(skippedPaths []string) ([]*regexp.Regexp, error) { + regexPaths := []*regexp.Regexp{} + + for _, sp := range skippedPaths { + r, err := regexp.Compile(sp) + if err != nil { + return nil, err + } + regexPaths = append(regexPaths, r) + } + return regexPaths, nil +} diff --git a/cmd/kazel/kazel_test.go b/cmd/kazel/kazel_test.go new file mode 100644 index 0000000000..e4dd68e140 --- /dev/null +++ b/cmd/kazel/kazel_test.go @@ -0,0 +1,76 @@ +/* +Copyright 2018 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package main + +import ( + "testing" + + "github.com/bazelbuild/buildtools/build" +) + +func TestAsExpr(t *testing.T) { + var testCases = []struct { + expr interface{} + want string + }{ + {42, "42"}, + {2.71828, "2.71828"}, + {2.718281828459045, "2.718281828459045"}, + {"a string", `"a string"`}, + // values should stay in specified order + {[]int{4, 7, 2, 9, 21}, `[ + 4, + 7, + 2, + 9, + 21, +]`}, + // keys should get sorted + {map[int]string{1: "foo", 5: "baz", 3: "bar"}, `{ + 1: "foo", + 3: "bar", + 5: "baz", +}`}, + // keys true and false should be sorted by their string representation + { + map[bool]map[string][]float64{ + true: {"b": {2, 2.2}, "a": {1, 1.1, 1.11}}, + false: {"": {}}, + }, + `{ + false: {"": []}, + true: { + "a": [ + 1, + 1.1, + 1.11, + ], + "b": [ + 2, + 2.2, + ], + }, +}`}, + } + + for _, testCase := range testCases { + result := build.FormatString(asExpr(testCase.expr)) + if result != testCase.want { + t.Errorf("asExpr(%v) = %v; want %v", testCase.expr, result, testCase.want) + } + } +} diff --git a/cmd/kazel/sourcerer.go b/cmd/kazel/sourcerer.go new file mode 100644 index 0000000000..68ef83146b --- /dev/null +++ b/cmd/kazel/sourcerer.go @@ -0,0 +1,109 @@ +/* +Copyright 2017 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "fmt" + "io/ioutil" + "path/filepath" + + "github.com/bazelbuild/buildtools/build" +) + +const ( + pkgSrcsTarget = "package-srcs" + allSrcsTarget = "all-srcs" +) + +// walkSource walks the source tree recursively from pkgPath, adding +// any BUILD files to v.newRules to be formatted. +// +// If AddSourcesRules is enabled in the kazel config, then we additionally add +// package-sources and recursive all-srcs filegroups rules to every BUILD file. +// +// Returns the list of children all-srcs targets that should be added to the +// all-srcs rule of the enclosing package. +func (v *Vendorer) walkSource(pkgPath string) ([]string, error) { + // clean pkgPath since we access v.newRules directly + pkgPath = filepath.Clean(pkgPath) + for _, r := range v.skippedPaths { + if r.MatchString(pkgPath) { + return nil, nil + } + } + files, err := ioutil.ReadDir(pkgPath) + if err != nil { + return nil, err + } + + // Find any children packages we need to include in an all-srcs rule. + var children []string + for _, f := range files { + if f.IsDir() { + c, err := v.walkSource(filepath.Join(pkgPath, f.Name())) + if err != nil { + return nil, err + } + children = append(children, c...) + } + } + + // This path is a package either if we've added rules or if a BUILD file already exists. + _, hasRules := v.newRules[pkgPath] + isPkg := hasRules + if !isPkg { + isPkg, _ = findBuildFile(pkgPath) + } + + if !isPkg { + // This directory isn't a package (doesn't contain a BUILD file), + // but there might be subdirectories that are packages, + // so pass that up to our parent. 
+ return children, nil + } + + // Enforce formatting the BUILD file, even if we're not adding srcs rules + if !hasRules { + v.addRules(pkgPath, nil) + } + + if !v.cfg.AddSourcesRules { + return nil, nil + } + + pkgSrcsExpr := &build.LiteralExpr{Token: `glob(["**"])`} + if pkgPath == "." { + pkgSrcsExpr = &build.LiteralExpr{Token: `glob(["**"], exclude=["bazel-*/**", ".git/**"])`} + } + + v.addRules(pkgPath, []*build.Rule{ + newRule("filegroup", + pkgSrcsTarget, + map[string]build.Expr{ + "srcs": pkgSrcsExpr, + "visibility": asExpr([]string{"//visibility:private"}), + }), + newRule("filegroup", + allSrcsTarget, + map[string]build.Expr{ + "srcs": asExpr(append(children, fmt.Sprintf(":%s", pkgSrcsTarget))), + // TODO: should this be more restricted? + "visibility": asExpr([]string{"//visibility:public"}), + }), + }) + return []string{fmt.Sprintf("//%s:%s", pkgPath, allSrcsTarget)}, nil +} diff --git a/code-of-conduct.md b/code-of-conduct.md new file mode 100644 index 0000000000..0d15c00cf3 --- /dev/null +++ b/code-of-conduct.md @@ -0,0 +1,3 @@ +# Kubernetes Community Code of Conduct + +Please refer to our [Kubernetes Community Code of Conduct](https://git.k8s.io/community/code-of-conduct.md) diff --git a/defs/BUILD.bazel b/defs/BUILD.bazel new file mode 100644 index 0000000000..467d7a774b --- /dev/null +++ b/defs/BUILD.bazel @@ -0,0 +1,62 @@ +load(":pkg.bzl", "pkg_tar") +load(":build.bzl", "release_filegroup") +load("@subpar//:subpar.bzl", "par_binary") + +par_binary( + name = "gcs_uploader", + srcs = [ + "gcs_uploader.py", + ], + python_version = "PY2", + visibility = ["//visibility:public"], +) + +pkg_tar( + name = "pkg_tar_smoke", + srcs = glob(["*.bzl"]), +) + +# generate the hash files to use in the sh_tests below +release_filegroup( + name = "testfile", + testonly = True, + srcs = [":testdata/testfile.txt"], +) + +[ + sh_test( + name = "test_gen%ssum" % hash, + srcs = ["diff_test.sh"], + args = [ + "$(location testdata/testfile.txt.%s.expected)" % hash, + "$(location testdata/testfile.txt.%s)" % hash, + ], + data = [ + ":testdata/testfile.txt.%s" % hash, + ":testdata/testfile.txt.%s.expected" % hash, + ], + ) + for hash in [ + "md5", + "sha1", + "sha512", + ] +] + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [ + ":package-srcs", + "//defs/testgen:all-srcs", + "//defs/testpkg:all-srcs", + ], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/defs/build.bzl b/defs/build.bzl new file mode 100644 index 0000000000..9955a7e469 --- /dev/null +++ b/defs/build.bzl @@ -0,0 +1,200 @@ +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Rules for uploading, hashing and creating release groups.""" + +def _gcs_upload_impl(ctx): + output_lines = [] + for t in ctx.attr.data: + label = str(t.label) + upload_path = ctx.attr.upload_paths.get(label, "") + for f in t.files.to_list(): + output_lines.append("%s\t%s" % (f.short_path, upload_path)) + + ctx.actions.write( + output = ctx.outputs.targets, + content = "\n".join(output_lines), + ) + + ctx.actions.write( + content = "%s --manifest %s --root $PWD -- $@" % ( + ctx.attr.uploader.files_to_run.executable.short_path, + ctx.outputs.targets.short_path, + ), + output = ctx.outputs.executable, + is_executable = True, + ) + + # TODO(fejta): migrate to provider + return struct( + runfiles = ctx.runfiles( + files = ctx.files.data + ctx.files.uploader + [ctx.info_file, ctx.version_file, ctx.outputs.targets], + ), + ) + +# Adds an executable rule to upload the specified artifacts to GCS. +# +# The keys in upload_paths must match the elaborated targets exactly; i.e., +# one must specify "//foo/bar:bar" and not just "//foo/bar". +# +# Both the upload_paths and the path supplied on the commandline can include +# Python format strings which will be replaced by values from the workspace status, +# e.g. gs://my-bucket-{BUILD_USER}/stash/{STABLE_BUILD_SCM_REVISION} +gcs_upload = rule( + attrs = { + "data": attr.label_list( + mandatory = True, + allow_files = True, + ), + "uploader": attr.label( + default = Label("//defs:gcs_uploader.par"), + allow_files = True, + ), + # TODO: combine with 'data' when label_keyed_string_dict is supported in Bazel + "upload_paths": attr.string_dict( + allow_empty = True, + ), + }, + executable = True, + outputs = { + "targets": "%{name}-targets.txt", + }, + implementation = _gcs_upload_impl, +) + +def md5sum(name, src, **kwargs): + """Computes the md5sum of the provided src file, saving it in a file named 'name'.""" + native.genrule( + name = name + "_genmd5sum", + srcs = [src], + outs = [name], + cmd = "command -v md5 >/dev/null && cmd='md5 -q' || cmd=md5sum; $$cmd $< | awk '{print $$1}' >$@", + message = "Computing md5sum", + **kwargs + ) + +def sha1sum(name, src, **kwargs): + """Computes the sha1sum of the provided src file, saving it in a file named 'name'.""" + native.genrule( + name = name + "_gensha1sum", + srcs = [src], + outs = [name], + cmd = "command -v sha1sum >/dev/null && cmd=sha1sum || cmd='shasum -a1'; $$cmd $< | awk '{print $$1}' >$@", + message = "Computing sha1sum", + **kwargs + ) + +def sha512sum(name, src, **kwargs): + """Computes the sha512sum of the provided src file, saving it in a file named 'name'.""" + native.genrule( + name = name + "_gensha512sum", + srcs = [src], + outs = [name], + cmd = "command -v sha512sum >/dev/null && cmd=sha512sum || cmd='shasum -a512'; $$cmd $< | awk '{print $$1}' >$@", + message = "Computing sha512sum", + **kwargs + ) + +def _hashes_for_srcs(srcs, srcs_basenames_needing_hashes): + """Returns a list of hash target names for the provided srcs. + + Also updates the srcs_basenames_needing_hashes dictionary, + mapping src name to basename for each target in srcs. 
+ """ + hashes = [] + for src in srcs: + parts = src.split(":") + if len(parts) > 1: + basename = parts[1] + else: + basename = src.split("/")[-1] + + srcs_basenames_needing_hashes[src] = basename + hashes.append(basename + ".md5") + hashes.append(basename + ".sha1") + hashes.append(basename + ".sha512") + return hashes + +def release_filegroup(name, srcs = None, conditioned_srcs = None, tags = None, visibility = None, **kwargs): + """Creates a variety of filegroups.""" + # TODO(fejta): better function doc. + + # Creates 3+N rules based on the provided targets: + # * A filegroup with just the provided targets (named 'name') + # * A filegroup containing all of the md5, sha1 and sha512 hash files ('name-hashes') + # * A filegroup containing both of the above ('name-and-hashes') + # * All of the necessary md5sum, sha1sum and sha512sum rules + + # The targets are specified using the srcs and conditioned_srcs attributes. + # srcs is expected to be label list. + # conditioned_srcs is a dictionary mapping conditions to label lists. + # It will be passed to select(). + if not srcs and not conditioned_srcs: + fail("srcs and conditioned_srcs cannot both be empty") + srcs = srcs or [] + + # A given src may occur in multiple conditioned_srcs, but we want to create the hash + # rules only once, so use a dictionary to deduplicate. + srcs_basenames_needing_hashes = {} + + hashes = _hashes_for_srcs(srcs, srcs_basenames_needing_hashes) + conditioned_hashes = {} + if conditioned_srcs: + for condition, csrcs in conditioned_srcs.items(): + conditioned_hashes[condition] = _hashes_for_srcs(csrcs, srcs_basenames_needing_hashes) + + hash_tags = tags or [] + hash_tags.append("manual") + for src, basename in srcs_basenames_needing_hashes.items(): + md5sum(name = basename + ".md5", src = src, tags = hash_tags, visibility = visibility) + sha1sum(name = basename + ".sha1", src = src, tags = hash_tags, visibility = visibility) + sha512sum(name = basename + ".sha512", src = src, tags = hash_tags, visibility = visibility) + + if conditioned_srcs: + native.filegroup( + name = name, + srcs = srcs + select(conditioned_srcs), + tags = tags, + **kwargs + ) + native.filegroup( + name = name + "-hashes", + srcs = hashes + select(conditioned_hashes), + tags = tags, + visibility = visibility, + **kwargs + ) + else: + native.filegroup( + name = name, + srcs = srcs, + tags = tags, + visibility = visibility, + **kwargs + ) + native.filegroup( + name = name + "-hashes", + srcs = hashes, + tags = tags, + visibility = visibility, + **kwargs + ) + + native.filegroup( + name = name + "-and-hashes", + srcs = [name, name + "-hashes"], + tags = tags, + visibility = visibility, + **kwargs + ) diff --git a/defs/deb.bzl b/defs/deb.bzl new file mode 100644 index 0000000000..edff08e823 --- /dev/null +++ b/defs/deb.bzl @@ -0,0 +1,66 @@ +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Defines rules for creating debian packages.""" + +load("//defs:pkg.bzl", "pkg_tar") +load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_deb") + +KUBERNETES_AUTHORS = "Kubernetes Authors " + +KUBERNETES_HOMEPAGE = "http://kubernetes.io" + +GOARCH_TO_DEBARCH = { + "386": "i386", + "amd64": "amd64", + "arm": "armhf", + "arm64": "arm64", + "ppc64le": "ppc64el", + "s390x": "s390x", +} + +def k8s_deb(name, goarch = "amd64", tags = None, **kwargs): + """k8s_deb calls pkg_deb for a particular architecture, setting contact info.""" + debarch = GOARCH_TO_DEBARCH[goarch] + pkg_deb( + name = name + "-" + goarch, + architecture = debarch, + data = select({"@io_bazel_rules_go//go/platform:" + goarch: name + "-data-" + goarch}), + homepage = KUBERNETES_HOMEPAGE, + maintainer = KUBERNETES_AUTHORS, + package = name, + tags = tags, + **kwargs + ) + +def deb_data(name, goarch = "amd64", data = [], tags = None, visibility = None): + """deb_data creates pkg_tar files for each datum.""" + deps = [] + for i, info in enumerate(data): + dname = "%s-deb-data-%s-%s" % (name, goarch, i) + deps.append(dname) + pkg_tar( + name = dname, + srcs = select({"@io_bazel_rules_go//go/platform:" + goarch: info["files"]}), + mode = info["mode"], + package_dir = info["dir"], + tags = tags, + visibility = visibility, + ) + pkg_tar( + name = name + "-data-" + goarch, + tags = tags, + visibility = visibility, + deps = select({"@io_bazel_rules_go//go/platform:" + goarch: deps}), + ) diff --git a/defs/diff_test.sh b/defs/diff_test.sh new file mode 100755 index 0000000000..437ce01882 --- /dev/null +++ b/defs/diff_test.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +# Copyright 2019 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +expected=$1 +generated=$2 + +diff=$(diff -u "${expected}" "${generated}" || true) + +if [[ -n "${diff}" ]]; then + echo "Generated file ${generated} does not match expected file ${expected}" + echo "${diff}" + exit 1 +fi diff --git a/defs/gcs_uploader.py b/defs/gcs_uploader.py new file mode 100644 index 0000000000..fa593fc19b --- /dev/null +++ b/defs/gcs_uploader.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python + +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
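+
+# How this script is fed, as a sketch (the artifact path below is only an
+# example): the gcs_upload rule writes a manifest with one line per artifact,
+# a source short_path and an upload subdirectory separated by a tab, e.g.
+#
+#   bin/some-tool\trelease/{STABLE_BUILD_SCM_REVISION}
+#
+# {KEY} placeholders in the subdirectory and in the destination paths given on
+# the command line are filled in from Bazel's stable/volatile workspace status
+# files. Each source is symlinked into a scratch tree under its subdirectory,
+# and the scratch tree is then uploaded with "gsutil rsync -C -r" (plus "-m"
+# for remote destinations) to every destination argument.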
+ +from __future__ import print_function +import argparse +import atexit +import os +import os.path +import shutil +import subprocess +import sys +import tempfile + +def _workspace_status_dict(root): + d = {} + for f in ("stable-status.txt", "volatile-status.txt"): + with open(os.path.join(root, f)) as info_file: + for info_line in info_file: + info_line = info_line.strip("\n") + key, value = info_line.split(" ") + d[key] = value + return d + +def main(argv): + scratch = tempfile.mkdtemp(prefix="bazel-gcs.") + atexit.register(lambda: shutil.rmtree(scratch)) + + workspace_status = _workspace_status_dict(argv.root) + with open(argv.manifest) as manifest: + for artifact in manifest: + artifact = artifact.strip("\n") + src_file, dest_dir = artifact.split("\t") + dest_dir = dest_dir.format(**workspace_status) + scratch_dest_dir = os.path.join(scratch, dest_dir) + try: + os.makedirs(scratch_dest_dir) + except (OSError): + # skip directory already exists errors + pass + + src = os.path.join(argv.root, src_file) + dest = os.path.join(scratch_dest_dir, os.path.basename(src_file)) + os.symlink(src, dest) + + ret = 0 + uploaded_paths = [] + for gcs_path in argv.gcs_paths: + gcs_path = gcs_path.format(**workspace_status) + local_path = None + if gcs_path.startswith("file://"): + local_path = gcs_path[len("file://"):] + elif "://" not in gcs_path: + local_path = gcs_path + if local_path and not os.path.exists(local_path): + os.makedirs(local_path) + + cmd = ["gsutil"] + # When rsyncing to a local directory, parallelization thrashes the disk. + # It also seems to be buggy, causing frequent "File exists" errors. + # To mitigate, only use parallel mode when rsyncing to a remote path. + if not local_path: + cmd.append("-m") + cmd.extend(["rsync", "-C", "-r", scratch, gcs_path]) + ret |= subprocess.call(cmd) + + uploaded_paths.append(gcs_path) + + print("Uploaded to %s" % " ".join(uploaded_paths)) + sys.exit(ret) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Upload build targets to GCS.') + + parser.add_argument("--manifest", required=True, help="path to manifest of targets") + parser.add_argument("--root", required=True, help="path to root of workspace") + parser.add_argument("gcs_paths", nargs="+", help="path in gcs to push targets") + + main(parser.parse_args()) diff --git a/defs/go.bzl b/defs/go.bzl new file mode 100644 index 0000000000..8b055d7bb9 --- /dev/null +++ b/defs/go.bzl @@ -0,0 +1,124 @@ +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Rules for generating gocode at compile time.""" + +load("@bazel_skylib//lib:paths.bzl", "paths") +load("@io_bazel_rules_go//go:def.bzl", "GoArchive", "GoLibrary", "go_context", "go_rule") + +def _compute_genrule_variables(srcs, outs): + resolved_srcs = [src.path for src in srcs] + resolved_outs = [out.path for out in outs] + variables = { + "SRCS": " ".join(resolved_srcs), + "OUTS": " ".join(resolved_outs), + } + if len(resolved_srcs) == 1: + variables["<"] = resolved_srcs[0] + if len(resolved_outs) == 1: + variables["@"] = resolved_outs[0] + return variables + +def _go_genrule_impl(ctx): + go = go_context(ctx) + + transitive_libs = depset(transitive = [d[GoArchive].transitive for d in ctx.attr.go_deps]) + + gopath = [] + for lib in transitive_libs.to_list(): + for srcfile in lib.srcs: + target = ctx.actions.declare_file(paths.join( + "gopath/src", + lib.importpath, + paths.basename(srcfile.path), + )) + + args = ctx.actions.args() + args.add(srcfile.path) + args.add(target.path) + + ctx.actions.run( + executable = "mv", + arguments = [args], + inputs = [srcfile], + outputs = [target], + mnemonic = "PrepareGopath", + ) + + gopath.append(target) + + srcs = [src for srcs in ctx.attr.srcs for src in srcs.files.to_list()] + + deps = depset( + gopath + srcs, + transitive = + # tools + [dep.files for dep in ctx.attr.tools] + + # go toolchain + [depset(go.sdk.libs + go.sdk.srcs + go.sdk.tools + [go.sdk.go])], + ) + + _, cmd, _ = ctx.resolve_command( + command = ctx.attr.cmd, + attribute = "cmd", + expand_locations = True, + make_variables = _compute_genrule_variables( + srcs, + ctx.outputs.outs, + ), + tools = ctx.attr.tools, + ) + + env = dict() + env.update(ctx.configuration.default_shell_env) + env.update(go.env) + env.update({ + "PATH": ctx.configuration.host_path_separator.join(["/usr/local/bin", "/bin", "/usr/bin"]), + "GOPATH": paths.join(ctx.bin_dir.path, paths.dirname(ctx.build_file_path), "gopath"), + "GOROOT": paths.dirname(go.sdk.root_file.path), + }) + + ctx.actions.run_shell( + inputs = deps, + outputs = ctx.outputs.outs, + env = env, + command = cmd, + progress_message = "%s %s" % (ctx.attr.message, ctx), + mnemonic = "GoGenrule", + ) + +# We have codegen procedures that depend on the "go/*" stdlib packages +# and thus depend on executing with a valid GOROOT. _go_genrule handles +# dependencies on the Go toolchain and environment variables; the +# macro go_genrule handles setting up GOPATH dependencies (using go_path). +go_genrule = go_rule( + _go_genrule_impl, + attrs = { + "srcs": attr.label_list(allow_files = True), + "tools": attr.label_list( + cfg = "host", + allow_files = True, + ), + "outs": attr.output_list(mandatory = True), + "cmd": attr.string(mandatory = True), + "go_deps": attr.label_list(providers = [ + GoLibrary, + GoArchive, + ]), + "importpath": attr.string(), + "message": attr.string(), + "executable": attr.bool(default = False), + }, + output_to_genfiles = True, +) diff --git a/defs/pkg.bzl b/defs/pkg.bzl new file mode 100644 index 0000000000..ef947990cb --- /dev/null +++ b/defs/pkg.bzl @@ -0,0 +1,34 @@ +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A faster implementation of the pkg_tar rule. + +pkg_tar wraps the official pkg_tar rule with our faster +Go-based build_tar binary. +Additionally, the upstream pkg_tar rule defaults mode to "0555", +which prevents build_tar from automatically choosing an +appropriate mode, so we instead default it to "". +""" + +load( + "@bazel_tools//tools/build_defs/pkg:pkg.bzl", + _real_pkg_tar = "pkg_tar", +) + +def pkg_tar( + build_tar = "@io_k8s_repo_infra//tools/build_tar", + mode = "", + **kwargs): + """pkg_tar wraps the official pkg_tar rule with a faster version.""" + _real_pkg_tar(build_tar = build_tar, mode = mode, **kwargs) diff --git a/defs/rpm.bzl b/defs/rpm.bzl new file mode 100644 index 0000000000..f32e3af6a3 --- /dev/null +++ b/defs/rpm.bzl @@ -0,0 +1,44 @@ +# Copyright 2019 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Rules for creating redhat packages.""" + +load("@bazel_tools//tools/build_defs/pkg:rpm.bzl", "pkg_rpm") + +GOARCH_TO_RPMARCH = { + "386": "i386", + "amd64": "x86_64", + "arm": "armhfp", + "arm64": "aarch64", + "ppc64le": "ppc64le", + "s390x": "s390x", +} + +def pkg_rpm_for_goarch(name, data, goarch, tags = None, **kwargs): + """Creates a pkg_rpm filtering to data relevant to the specified goarch.""" + rpmarch = GOARCH_TO_RPMARCH[goarch] + pkg_rpm( + name = name + "-" + goarch, + architecture = rpmarch, + data = select( + { + "@io_bazel_rules_go//go/platform:" + goarch: [ + d.format(GOARCH = goarch, RPMARCH = rpmarch) + for d in data + ], + }, + ), + tags = tags, + **kwargs + ) diff --git a/defs/run_in_workspace.bzl b/defs/run_in_workspace.bzl new file mode 100644 index 0000000000..d92c15bf5e --- /dev/null +++ b/defs/run_in_workspace.bzl @@ -0,0 +1,91 @@ +# Copyright 2018 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Defines rules for starting a process in the workspace root. 
+ +This technique was inspired by the gazelle rule implementation in bazelbuild/rules_go: +https://github.com/bazelbuild/rules_go/blob/86ade29284ca11deeead86c061e9ba9bd0d157e0/go/private/tools/gazelle.bzl +""" + +def _workspace_binary_script_impl(ctx): + content = """#!/usr/bin/env bash +set -o errexit +set -o nounset +set -o pipefail + +if [[ -n "${{BUILD_WORKSPACE_DIRECTORY:-}}" ]]; then + # Running from inside bazel + cd "${{BUILD_WORKSPACE_DIRECTORY}}" +else + # Running from bazel-bin + cd "$(git rev-parse --show-toplevel)" +fi +# bazel-repo-infra will handle both external and local binaries, aka +# bazel-repo-infra/external/go_sdk/bin/go +# bazel-repo-infra/bazel-out/k8-fastbuild/bin/cmd/kazel/linux_amd64_stripped/kazel +"bazel-${{PWD##*/}}/{cmd}" "$@" +""".format( + cmd = ctx.file.cmd.path, + ) + ctx.actions.write( + output = ctx.outputs.executable, + content = content, + is_executable = True, + ) + runfiles = ctx.runfiles( + files = [ + ctx.file.cmd, + ], + ) + return [DefaultInfo(runfiles = runfiles)] + +_workspace_binary_script = rule( + attrs = { + "cmd": attr.label( + mandatory = True, + allow_single_file = True, + ), + }, + executable = True, + implementation = _workspace_binary_script_impl, +) + +def workspace_binary( + name, + cmd, + args = None, + visibility = None): + """Wraps a binary to be run in the workspace root via bazel run. + + For example, one might do something like + + workspace_binary( + name = "dep", + cmd = "//vendor/github.com/golang/dep/cmd/dep", + ) + which would allow running dep with bazel run. + """ + script_name = name + "_script" + _workspace_binary_script( + name = script_name, + cmd = cmd, + tags = ["manual"], + ) + native.sh_binary( + name = name, + srcs = [":" + script_name], + args = args, + visibility = visibility, + tags = ["manual"], + ) diff --git a/defs/testdata/testfile.txt b/defs/testdata/testfile.txt new file mode 100644 index 0000000000..ba5a5d3da4 --- /dev/null +++ b/defs/testdata/testfile.txt @@ -0,0 +1 @@ +This is just some data to hash diff --git a/defs/testdata/testfile.txt.md5.expected b/defs/testdata/testfile.txt.md5.expected new file mode 100644 index 0000000000..79c56b5a5b --- /dev/null +++ b/defs/testdata/testfile.txt.md5.expected @@ -0,0 +1 @@ +6c840392943ddfc0d213a8786ddcf913 diff --git a/defs/testdata/testfile.txt.sha1.expected b/defs/testdata/testfile.txt.sha1.expected new file mode 100644 index 0000000000..76ef2bce9e --- /dev/null +++ b/defs/testdata/testfile.txt.sha1.expected @@ -0,0 +1 @@ +a1ee087329ef524229a8eb3dadc33265a0d30288 diff --git a/defs/testdata/testfile.txt.sha512.expected b/defs/testdata/testfile.txt.sha512.expected new file mode 100644 index 0000000000..894b8a70ed --- /dev/null +++ b/defs/testdata/testfile.txt.sha512.expected @@ -0,0 +1 @@ +77f3b6ab7f0b30eac6c84baecd7308c462df575b5a1bea484253fcdf5a0f61f0d9aeaa5a25f51a1621c90d0cc461f041ba5dba7573092221287af620f0f1c573 diff --git a/defs/testgen/BUILD.bazel b/defs/testgen/BUILD.bazel new file mode 100644 index 0000000000..8b8a50f8ba --- /dev/null +++ b/defs/testgen/BUILD.bazel @@ -0,0 +1,28 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = ["main.go"], + importpath = "k8s.io/repo-infra/defs/testgen", + visibility = ["//defs:__subpackages__"], +) + +go_binary( + name = "testgen", + embed = [":go_default_library"], + visibility = ["//defs:__subpackages__"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) 
+ +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/defs/testgen/main.go b/defs/testgen/main.go new file mode 100644 index 0000000000..b7800442ba --- /dev/null +++ b/defs/testgen/main.go @@ -0,0 +1,55 @@ +/* +Copyright 2019 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package main + +import ( + "flag" + "fmt" + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + "io/ioutil" + "log" +) + +var ( + in = flag.String("in", "", "input") + out = flag.String("out", "", "output") + pkgName = flag.String("pkg", "", "package") +) + +func main() { + flag.Parse() + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, *in, nil, 0) + if err != nil { + log.Fatal(err) + } + + conf := types.Config{Importer: importer.Default()} + + pkg, err := conf.Check(*pkgName, fset, []*ast.File{f}, nil) + if err != nil { + log.Fatal(err) + } + if err := ioutil.WriteFile(*out, []byte(fmt.Sprintf("package %s\nconst OK = true", pkg.Name())), 0666); err != nil { + log.Fatal(err) + } +} diff --git a/defs/testpkg/BUILD.bazel b/defs/testpkg/BUILD.bazel new file mode 100644 index 0000000000..ea931ae0c7 --- /dev/null +++ b/defs/testpkg/BUILD.bazel @@ -0,0 +1,38 @@ +load("//defs:go.bzl", "go_genrule") +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = [ + "ok.go", + "pkg.go", + ], + importpath = "k8s.io/repo-infra/defs/testpkg", + visibility = ["//visibility:public"], +) + +go_genrule( + name = "go_genrule_test", + srcs = [ + "pkg.go", + ], + outs = [ + "ok.go", + ], + cmd = "$(location //defs/testgen) -in=$< -out=$@ -pkg k8s.io/repo-infra/defs/testpkg", + tools = ["//defs/testgen"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/defs/testpkg/pkg.go b/defs/testpkg/pkg.go new file mode 100644 index 0000000000..c99b437bf9 --- /dev/null +++ b/defs/testpkg/pkg.go @@ -0,0 +1,27 @@ +/* +Copyright 2019 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package testpkg + +import ( + // Make sure stdlib is accessible + "fmt" +) + +// TestFunc is a func. 
+func TestFunc() { + fmt.Println("ok") +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000000..2ae3b9d630 --- /dev/null +++ b/go.mod @@ -0,0 +1,11 @@ +module k8s.io/repo-infra + +go 1.13 + +require ( + github.com/bazelbuild/bazel-gazelle v0.20.0 + github.com/bazelbuild/buildtools v0.0.0-20200228172928-c9d9e342afdb + github.com/golangci/golangci-lint v1.25.0 + golang.org/x/build v0.0.0-20200302185339-bb8466fe872a + k8s.io/klog/v2 v2.0.0 +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000000..a2cff9908d --- /dev/null +++ b/go.sum @@ -0,0 +1,575 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/OpenPeeDeeP/depguard v1.0.1 h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us= +github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= +github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/bazelbuild/bazel-gazelle v0.20.0 h1:kRymV9q+24Mbeg25fJehw+gvrtVIlwZZAefOSUq4MzU= +github.com/bazelbuild/bazel-gazelle v0.20.0/go.mod h1:rPwzNHUqEzngx1iVBfO/2X2npKaT3tqPqqHW6rVsn/A= +github.com/bazelbuild/buildtools v0.0.0-20190731111112-f720930ceb60 h1:OfyUN/Msd8yqJww6deQ9vayJWw+Jrbe6Qp9giv51QQI= +github.com/bazelbuild/buildtools v0.0.0-20190731111112-f720930ceb60/go.mod h1:5JP0TXzWDHXv8qvxRC4InIazwdyDseBDbzESUMKk1yU= +github.com/bazelbuild/buildtools v0.0.0-20200228172928-c9d9e342afdb h1:F2UHxHXipTXxTmIKALHwAdNsvTPhSkgshcTNCMPJj1M= 
+github.com/bazelbuild/buildtools v0.0.0-20200228172928-c9d9e342afdb/go.mod h1:5JP0TXzWDHXv8qvxRC4InIazwdyDseBDbzESUMKk1yU= +github.com/bazelbuild/rules_go v0.0.0-20190719190356-6dae44dc5cab h1:wzbawlkLtl2ze9w/312NHZ84c7kpUCtlkD8HgFY27sw= +github.com/bazelbuild/rules_go v0.0.0-20190719190356-6dae44dc5cab/go.mod h1:MC23Dc/wkXEyk3Wpq6lCqz0ZAYOZDw2DR5y3N1q2i7M= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/bombsimon/wsl/v3 v3.0.0 h1:w9f49xQatuaeTJFaNP4SpiWSR5vfT6IstPtM62JjcqA= +github.com/bombsimon/wsl/v3 v3.0.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= +github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= +github.com/fsnotify/fsnotify v1.4.7 
h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= +github.com/go-critic/go-critic v0.4.1 h1:4DTQfT1wWwLg/hzxwD9bkdhDQrdJtxe6DUTadPlrIeE= +github.com/go-critic/go-critic v0.4.1/go.mod h1:7/14rZGnZbY6E38VEGk2kVhoq6itzc1E68facVDK23g= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-lintpack/lintpack v0.5.2 h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0= +github.com/go-lintpack/lintpack v0.5.2/go.mod h1:NwZuYi2nUHho8XEIZ6SIxihrnPoqBTDqfpXvXAN0sXM= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logr/logr v0.1.0 h1:M1Tv3VzNlEHg6uyACnRdtrploV2P7wZqH8BoQMtz0cg= +github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= +github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8= +github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g= +github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= +github.com/go-toolsmith/astcopy v1.0.0 h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8= +github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ= +github.com/go-toolsmith/astequal v0.0.0-20180903214952-dcb477bfacd6/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/astfmt v0.0.0-20180903215011-8f8ee99c3086/go.mod h1:mP93XdblcopXwlyN4X4uodxXQhldPGZbcEJIimQHrkg= +github.com/go-toolsmith/astfmt v1.0.0 h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k= +github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= +github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= +github.com/go-toolsmith/astp v0.0.0-20180903215135-0af7e3c24f30/go.mod h1:SV2ur98SGypH1UjcPpCatrV5hPazG6+IfNHbkDXBRrk= +github.com/go-toolsmith/astp v1.0.0 h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg= +github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= +github.com/go-toolsmith/pkgload v0.0.0-20181119091011-e9e65178eee8/go.mod h1:WoMrjiy4zvdS+Bg6z9jZH82QXwkcgCBX6nOfnmdaHks= +github.com/go-toolsmith/pkgload v1.0.0 h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg= +github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= +github.com/go-toolsmith/typep v1.0.0 h1:zKymWyA1TRYvqYrYDrfEMZULyrhcnGY3x7LDKU2XQaA= +github.com/go-toolsmith/typep v1.0.0/go.mod 
h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b h1:ekuhfTjngPhisSjOJ0QWKpPQE8/rbknHaes6WVJj5Hw= +github.com/gofrs/flock v0.0.0-20190320160742-5135e617513b/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gogo/protobuf v1.1.1 h1:72R+M5VuhED/KujmZVcIquuo8mBgX4oVda//DQb3PXo= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1 h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE= +github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0 h1:28o5sBqPkBsMGnC6b4MvE2TzSr5/AT4c/1fLqVGIwlk= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0= +github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= +github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6 h1:YYWNAGTKWhKpcLLt7aSj/odlKrSrelQwlovBpDuf19w= +github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0= +github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw= +github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= +github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8= +github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o= 
+github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee h1:J2XAy40+7yz70uaOiMbNnluTg7gyQhtGqLQncQh+4J8= +github.com/golangci/gocyclo v0.0.0-20180528134321-2becd97e67ee/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU= +github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= +github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= +github.com/golangci/golangci-lint v1.25.0 h1:fwVdXtCBBCmk9e/7bTjkeCMx52bhq1IqmEQOVDbHXcg= +github.com/golangci/golangci-lint v1.25.0/go.mod h1:BaJNZmLU6vdaTLEGJKTTL/05I3B2OfXaD9SrNVkwr7w= +github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI= +github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU= +github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= +github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= +github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= +github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= +github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770 h1:EL/O5HGrF7Jaq0yNhBLucz9hTuRzj2LdwGBOaENgxIk= +github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA= +github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 h1:leSNB7iYzLYSSx3J/s5sVf4Drkc68W2wm4Ixh/mr0us= +github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI= +github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg= +github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4= +github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= +github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof 
v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3 h1:JVnpOZS+qxli+rgVl98ILOXVNbW+kb5wcxeGx8ShUIw= +github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jellevandenhooff/dkim v0.0.0-20150330215556-f50fe3d243e1/go.mod h1:E0B/fFc00Y+Rasa88328GlI/XbtyysCtTHZS8h7IrBU= +github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk= +github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s= +github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3 h1:jNYPNLe3d8smommaoQlK7LOA5ESyUJJ+Wf79ZtA7Vp4= +github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= +github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod 
h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= +github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4= +github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/maratori/testpackage v1.0.1 h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ= +github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= +github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE= +github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= +github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mitchellh/go-homedir v1.1.0 
h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk= +github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mozilla/tls-observatory v0.0.0-20190404164649-a3c1b6cfecfd/go.mod h1:SrKMQvPiws7F7iqYp8/TX+IhxCYhzr6N/1yb8cwHsGk= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nakabonne/nestif v0.3.0 h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw= +github.com/nakabonne/nestif v0.3.0/go.mod h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c= +github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E= +github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/onsi/ginkgo v1.6.0 h1:Ix8l273rp3QzYgXSR+c8d1fTG7UPgYkOSELPhiY/YGw= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.11.0 h1:JAKSXpt1YjtLA7YpPiqO9ss6sNXEsPfSGdwN0UHqzrw= +github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/gomega v1.8.1 h1:C5Dqfs/LeauYDX0jJXIe2SWmwCbGzx9yF8C8xy3Lh34= +github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/procfs 
v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/ryancurrah/gomodguard v1.0.2 h1:vumZpZardqQ9EfFIZDNEpKaMxfqqEBMhu0uSRcDO5x4= +github.com/ryancurrah/gomodguard v1.0.2/go.mod h1:9T/Cfuxs5StfsocWr4WzDL36HqnX0fVb9d5fSEaLhoE= +github.com/securego/gosec v0.0.0-20200103095621-79fbf3af8d83 h1:AtnWoOvTioyDXFvu96MWEeE8qj4COSQnJogzLy/u41A= +github.com/securego/gosec v0.0.0-20200103095621-79fbf3af8d83/go.mod h1:vvbZ2Ae7AzSq3/kywjUDxSNq2SJ27RxCz2un0H3ePqE= +github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc= +github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc= +github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM= +github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041 h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc= +github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sourcegraph/go-diff v0.5.1 h1:gO6i5zugwzo1RVTvgvfwCOSVegNuvnNi6bAD1QCmkHs= +github.com/sourcegraph/go-diff v0.5.1/go.mod h1:j2dHj3m8aZgQO8lMTcTnBcXkRRRqi34cd2MNlA9u1mE= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= +github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= +github.com/spf13/jwalterweatherman v1.0.0/go.mod 
h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/spf13/viper v1.6.1 h1:VPZzIkznI1YhVMRi6vNFLHSwhnhReBfgTxIPccpfdZk= +github.com/spf13/viper v1.6.1/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfDi5k= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= +github.com/tetafro/godot v0.2.5 h1:7+EYJM/Z4gYZhBFdRrVm6JTj5ZLw/QI1j4RfEOXJviE= +github.com/tetafro/godot v0.2.5/go.mod h1:pT6/T8+h6//L/LwQcFc4C0xpfy1euZwzS1sHdrFCms0= +github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e h1:RumXZ56IrCj4CL+g1b9OL/oH0QnsF976bC8xQFYUD5Q= +github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As= +github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ultraware/funlen v0.0.2 h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo= +github.com/ultraware/funlen v0.0.2/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= +github.com/ultraware/whitespace v0.0.4 h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg= +github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= +github.com/uudashr/gocognit v1.0.1 h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs= +github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.2.0/go.mod 
h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s= +github.com/valyala/quicktemplate v1.2.0/go.mod h1:EH+4AkTd43SvgIbQHYu59/cJyxDoOVRUAfrukLPuGJ4= +github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= +golang.org/x/build v0.0.0-20200302185339-bb8466fe872a h1:aJVl+xDmB1VkCrLev/VX9jah/wJX/I58lUclpeX5zBY= +golang.org/x/build v0.0.0-20200302185339-bb8466fe872a/go.mod h1:AZUA4Q+VyW6vr2O+wetMY3SpcTqdCUrAcdQp3ueAxLs= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee h1:WG0RUwxtNT4qqaXX3DPA8zHFNm/D9xaBpxzHt1WcA/E= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478 h1:l5EDrHhldLYb3ZRHDUhXF7Om7MvYXnkV9/iQNo1lX6g= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b h1:0mm1VjtFUOIlE1SbDlwjYaDxZVDP2S5ou6y0gSgXHu8= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 
v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190122071731-054c452bb702/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69 h1:rOhMmluY6kLMhdnrivzec6lLgaVbMHMn2ISQXJeJ5EM= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5 h1:LfCXLvNmTYH9kEmVgqbnsWfruoXZIrh4YBgqVHtDvw0= +golang.org/x/sys 
v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181117154741-2ddaf7f79a09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190122202912-9c309ee22fab/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200102140908-9497f49d5709 h1:AfG1EmoRkFK24HWWLxSrRKNg2G+oA3JVOG8GJsHWypQ= +golang.org/x/tools v0.0.0-20200102140908-9497f49d5709/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c h1:2EA2K0k9bcvvEDlqD8xdlOhCOqq+O/p9Voqi4x9W1YU= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e h1:3Dzrrxi54Io7Aoyb0PYLsI47K2TxkRQg+cqUn+m04do= +golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod 
h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200207204624-4f3edf09f4f6/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno= +gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo= +gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod 
h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +k8s.io/klog/v2 v2.0.0 h1:Foj74zO6RbjjP4hBEKjnYtjjAhGg4jNynUdYF6fJrok= +k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= +mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= +mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= +mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f h1:Cq7MalBHYACRd6EesksG1Q8EoIAKOsiZviGKbOLIej4= +mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4 h1:JPJh2pk3+X4lXAkZIk2RuE/7/FoK9maXw+TNPJhVS/c= +sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= diff --git a/hack/BUILD.bazel b/hack/BUILD.bazel new file mode 100644 index 0000000000..20b8a79a66 --- /dev/null +++ b/hack/BUILD.bazel @@ -0,0 +1,166 @@ +package(default_visibility = ["//visibility:public"]) + +test_suite( + name = "verify-all", + tags = ["lint"], # picks up all non-manual targets with this tag +) + +_BUILDIFIER = "@com_github_bazelbuild_buildtools//buildifier" + +_GAZELLE = "@bazel_gazelle//cmd/gazelle" + +_GO = "@go_sdk//:bin/go" + +_KAZEL = "@io_k8s_repo_infra//cmd/kazel" + +_GOLANGCI_LINT = "@com_github_golangci_golangci_lint//cmd/golangci-lint" + +sh_binary( + name = "update-bazel", + srcs = ["update-bazel.sh"], + args = [ + "$(location %s)" % _BUILDIFIER, + "$(location %s)" % _GAZELLE, + "$(location %s)" % _KAZEL, + ], + data = [ + _BUILDIFIER, + _GAZELLE, + _KAZEL, + ], +) + +sh_binary( + name = "update-deps", + srcs = ["update-deps.sh"], + args = [ + "$(location %s)" % _GO, + "$(location :update-bazel)", + "$(location %s)" % _BUILDIFIER, + "$(location %s)" % _GAZELLE, + "$(location %s)" % _KAZEL, + ], + data = [ + _BUILDIFIER, + _GAZELLE, + _GO, + _KAZEL, + ":update-bazel", + ], +) + +sh_binary( + name = "update-gofmt", + srcs = ["update-gofmt.sh"], + args = ["$(location @go_sdk//:bin/gofmt)"], + data = ["@go_sdk//:bin/gofmt"], +) + +sh_test( + name = "verify-bazel", + srcs = ["verify-bazel.sh"], + args = [ + "$(location %s)" % _BUILDIFIER, + "$(location %s)" % _GAZELLE, + "$(location %s)" % _KAZEL, + ], + data = [ + "@//:all-srcs", + _BUILDIFIER, + _GAZELLE, + _KAZEL, + ], + tags = ["lint"], +) + +# TODO(fejta): refactor this some more +py_test( + name = "verify-boilerplate", + srcs = ["verify_boilerplate.py"], + args = ["--boilerplate-dir=$(location //verify/boilerplate:boilerplate.go.txt)"], + data = [ + 
"//verify/boilerplate:boilerplate.go.txt", + "//verify/boilerplate:templates", + "@//:all-srcs", + ], + main = "verify_boilerplate.py", + python_version = "PY3", + tags = ["lint"], +) + +py_test( + name = "verify_boilerplate_test", + srcs = [ + "verify_boilerplate.py", + "verify_boilerplate_test.py", + ], + data = [ + "//verify/boilerplate:templates", + "//verify/boilerplate:testdata", + ], + python_version = "PY3", +) + +sh_test( + name = "verify-deps", + srcs = ["verify-deps.sh"], + args = [ + "$(location :update-deps)", + "$(location %s)" % _GO, + "$(location :update-bazel)", + "$(location %s)" % _BUILDIFIER, + "$(location %s)" % _GAZELLE, + "$(location %s)" % _KAZEL, + ], + data = [ + _BUILDIFIER, + _GAZELLE, + _GO, + _KAZEL, + ":update-bazel", + ":update-deps", + "@//:all-srcs", + ], + tags = ["lint"], +) + +sh_test( + name = "verify-gofmt", + srcs = ["verify-gofmt.sh"], + args = ["$(location @go_sdk//:bin/gofmt)"], + data = [ + "@//:all-srcs", + "@go_sdk//:bin/gofmt", + ], + tags = ["lint"], +) + +sh_test( + name = "verify-golangci-lint", + srcs = ["verify-golangci-lint.sh"], + args = [ + "$(location %s)" % _GO, + "$(location %s)" % _GOLANGCI_LINT, + ], + data = [ + "@//:all-srcs", + _GO, + _GOLANGCI_LINT, + "@go_sdk//:files", + ], + tags = ["lint"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/hack/tools.go b/hack/tools.go new file mode 100644 index 0000000000..7f47f792ad --- /dev/null +++ b/hack/tools.go @@ -0,0 +1,25 @@ +// +build tools + +/* +Copyright 2019 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package hack + +import ( + _ "github.com/bazelbuild/bazel-gazelle/cmd/gazelle" + _ "github.com/bazelbuild/buildtools/buildifier" + _ "github.com/golangci/golangci-lint/cmd/golangci-lint" +) diff --git a/hack/update-bazel.sh b/hack/update-bazel.sh new file mode 100755 index 0000000000..8ad180d171 --- /dev/null +++ b/hack/update-bazel.sh @@ -0,0 +1,49 @@ +#!/usr/bin/env bash +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +if [[ -n "${BUILD_WORKSPACE_DIRECTORY:-}" ]]; then # Running inside bazel + echo "Updating bazel rules..." >&2 +elif ! 
command -v bazel &>/dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +else + ( + set -o xtrace + bazel run @io_k8s_repo_infra//hack:update-bazel + ) + exit 0 +fi + +buildifier=$(realpath "$1") +gazelle=$(realpath "$2") +kazel=$(realpath "$3") + +cd "$BUILD_WORKSPACE_DIRECTORY" + +if [[ ! -f go.mod ]]; then + echo "No module defined, see https://github.com/golang/go/wiki/Modules#how-to-define-a-module" >&2 + exit 1 +fi + +set -o xtrace +"$gazelle" fix --external=external +"$kazel" --cfg-path=./.kazelcfg.json +find . -name BUILD -o -name BUILD.bazel -o -name '*.bzl' -type f \ + \( -not -path '*/vendor/*' -prune \) \ + -exec "$buildifier" --mode=fix --lint=fix '{}' + diff --git a/hack/update-deps.sh b/hack/update-deps.sh new file mode 100755 index 0000000000..c1e81a6c4d --- /dev/null +++ b/hack/update-deps.sh @@ -0,0 +1,92 @@ +#!/bin/bash +# Copyright 2018 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Update vendor and bazel rules to match go.mod +# +# Usage: +# update-deps.sh [--patch|--minor] [packages] + +set -o nounset +set -o errexit +set -o pipefail + +if [[ -n "${BUILD_WORKSPACE_DIRECTORY:-}" ]]; then # Running inside bazel + echo "Updating modules..." >&2 +elif ! command -v bazel &>/dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +else + ( + set -o xtrace + bazel run @io_k8s_repo_infra//hack:update-deps -- "$@" + ) + exit 0 +fi + +go=$(realpath "$1") +export PATH=$(dirname "$go"):$PATH +buildifier=$(realpath "$3") +gazelle=$(realpath "$4") +kazel=$(realpath "$5") +update_bazel=( + $(realpath "$2") + "$buildifier" + "$gazelle" + "$kazel" +) + +shift 5 + +cd "$BUILD_WORKSPACE_DIRECTORY" +trap 'echo "FAILED" >&2' ERR + +export GO111MODULE=on +export GOPROXY=https://proxy.golang.org +export GOSUMDB=sum.golang.org +mode="${1:-}" +shift || true +case "$mode" in +--minor) + if [[ -z "$@" ]]; then + "$go" get -u ./... + else + "$go" get -u "$@" + fi + ;; +--patch) + if [[ -z "$@" ]]; then + "$go" get -u=patch ./... + else + "$go" get -u=patch "$@" + fi + ;; +"") + # Just validate, or maybe manual go.mod edit + ;; +*) + echo "Usage: $(basename "$0") [--patch|--minor] [packages]" >&2 + exit 1 + ;; +esac + +rm -rf vendor +"$go" mod tidy +"$gazelle" update-repos \ + --from_file=go.mod --to_macro=repos.bzl%go_repositories \ + --build_file_generation=on --build_file_proto_mode=disable \ + --prune +"${update_bazel[@]}" # TODO(fejta): do we still need to do this? +echo "SUCCESS: updated modules" diff --git a/hack/update-gofmt.sh b/hack/update-gofmt.sh new file mode 100755 index 0000000000..78997fab34 --- /dev/null +++ b/hack/update-gofmt.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +if [[ -n "${BUILD_WORKSPACE_DIRECTORY:-}" ]]; then # Running inside bazel + echo "Updating gofmt..." >&2 +elif ! command -v bazel &>/dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +else + ( + set -o xtrace + bazel run @io_k8s_repo_infra//hack:update-gofmt + ) + exit 0 +fi + +gofmt=$PWD/$1 +cd "$BUILD_WORKSPACE_DIRECTORY" +find . -name "*.go" \( -not -path '*/vendor/*' -prune \) -exec "$gofmt" -s -w '{}' + diff --git a/hack/verify-bazel.sh b/hack/verify-bazel.sh new file mode 100755 index 0000000000..0f999399c0 --- /dev/null +++ b/hack/verify-bazel.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + + +fail() { + echo "ERROR: $1. Fix with:" >&2 + echo " bazel run @io_k8s_repo_infra//hack:update-bazel" >&2 + exit 1 +} + +if [[ -n "${TEST_WORKSPACE:-}" ]]; then # Running inside bazel + echo "Validating bazel rules..." >&2 +elif ! command -v bazel &> /dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +elif ! bazel query @//:all-srcs &>/dev/null; then + fail "bazel rules need bootstrapping" +else + ( + set -o xtrace + bazel test --test_output=streamed @io_k8s_repo_infra//hack:verify-bazel + ) + exit 0 +fi + +buildifier=$1 +gazelle=$2 +kazel=$3 + +gazelle_diff=$("$gazelle" fix --mode=diff --external=external || echo "ERROR: gazelle diffs") +kazel_diff=$("$kazel" --dry-run --print-diff --cfg-path=./.kazelcfg.json || echo "ERROR: kazel diffs") +# TODO(fejta): --mode=diff --lint=warn +buildifier_diff=$(find . \ + -name BUILD -o -name BUILD.bazel -o -name '*.bzl' -type f \ + \( -not -path '*/vendor/*' -prune \) \ + -exec "$buildifier" --mode=diff '{}' + 2>&1 || echo "ERROR: found buildifier diffs") + +if [[ -n "${gazelle_diff}${kazel_diff}${buildifier_diff}" ]]; then + echo "Current rules (-) do not match expected (+):" >&2 + echo "gazelle diff:" + echo "${gazelle_diff}" + echo "kazel diff:" + echo "${kazel_diff}" + echo "buildifier diff:" + echo "$buildifier_diff" + echo + fail "bazel rules out of date" +fi diff --git a/hack/verify-deps.sh b/hack/verify-deps.sh new file mode 100755 index 0000000000..64f4b9cc1c --- /dev/null +++ b/hack/verify-deps.sh @@ -0,0 +1,75 @@ +#!/bin/bash +# Copyright 2018 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o nounset +set -o errexit +set -o pipefail + +fail() { + echo "ERROR: $1. Fix with:" >&2 + echo " bazel run @io_k8s_repo_infra//hack:update-deps" >&2 + exit 1 +} + + +if [[ -n "${TEST_WORKSPACE:-}" ]]; then # Running inside bazel + echo "Checking modules for changes..." >&2 +elif ! command -v bazel &>/dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +elif ! bazel query @//:all-srcs &>/dev/null; then + fail "bazel rules need bootstrapping" +else + ( + set -o xtrace + bazel test --test_output=streamed @io_k8s_repo_infra//hack:verify-deps + ) + exit 0 +fi + + +tmpfiles=$TEST_TMPDIR/files + +( + mkdir -p "$tmpfiles" + rm -f bazel-* + cp -aL "." "$tmpfiles" + export BUILD_WORKSPACE_DIRECTORY=$tmpfiles + export HOME=$(realpath "$TEST_TMPDIR/home") + unset GOPATH + go=$(realpath "$2") + export PATH=$(dirname "$go"):$PATH + "$@" +) + +( + # Remove the platform/binary for gazelle and kazel + gazelle=$(dirname "$3") + kazel=$(dirname "$4") + rm -rf {.,"$tmpfiles"}/{"$gazelle","$kazel"} +) +# Avoid diff -N so we handle empty files correctly +diff=$(diff -upr \ + -x ".git" \ + -x "bazel-*" \ + -x "_output" \ + "." "$tmpfiles" 2>/dev/null || true) + +if [[ -n "${diff}" ]]; then + echo "${diff}" >&2 + echo >&2 + fail "modules changed" +fi +echo "SUCCESS: modules up-to-date" diff --git a/hack/verify-gofmt.sh b/hack/verify-gofmt.sh new file mode 100755 index 0000000000..a15803d402 --- /dev/null +++ b/hack/verify-gofmt.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +if [[ -n "${TEST_WORKSPACE:-}" ]]; then # Running inside bazel + echo "Validating gofmt..." >&2 +elif ! command -v bazel &> /dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +else + ( + set -o xtrace + bazel test --test_output=streamed @io_k8s_repo_infra//hack:verify-gofmt + ) + exit 0 +fi + +gofmt="$1" +diff=$(find . -name "*.go" \( -not -path '*/vendor/*' -prune \) -exec "$gofmt" -s -d '{}' +) +if [[ -z "$diff" ]]; then + exit 0 +fi + +echo "$diff" +echo +echo "ERROR: found unformatted go files, fix with:" >&2 +echo " bazel run @io_k8s_repo_infra//hack:update-gofmt" >&2 +exit 1 diff --git a/hack/verify-golangci-lint.sh b/hack/verify-golangci-lint.sh new file mode 100755 index 0000000000..409e56f1fb --- /dev/null +++ b/hack/verify-golangci-lint.sh @@ -0,0 +1,49 @@ +#!/bin/bash +# Copyright 2017 The Kubernetes Authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +if [[ -n "${TEST_WORKSPACE:-}" ]]; then # Running inside bazel + echo "Verifying golangci-lint..." >&2 +elif ! command -v bazel &> /dev/null; then + echo "Install bazel at https://bazel.build" >&2 + exit 1 +else + ( + set -o xtrace + bazel test --test_output=streamed @io_k8s_repo_infra//hack:verify-golangci-lint + ) + exit 0 +fi + +trap 'echo ERROR: golangci-lint failed >&2' ERR + +if [[ ! -f .golangci.yml ]]; then + echo 'ERROR: missing .golangci.yml in repo root' >&2 + exit 1 +fi + +golangci_lint=$2 +export GO111MODULE=on +export GOPROXY=https://proxy.golang.org +export GOSUMDB=sum.golang.org +export HOME=$TEST_TMPDIR/home +export GOPATH=$HOME/go +PATH=$(dirname "$1"):$PATH +export PATH +shift 2 +"$golangci_lint" run "$@" diff --git a/hack/verify_boilerplate.py b/hack/verify_boilerplate.py new file mode 100755 index 0000000000..62c584877e --- /dev/null +++ b/hack/verify_boilerplate.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python3 + +# Copyright 2015 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Verifies that all source files contain the necessary copyright boilerplate +# snippet. + +import argparse +import datetime +import glob +import os +import re +import sys + + +def get_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "filenames", help="list of files to check, all files if unspecified", nargs='*') + + rootdir = os.path.abspath('.') + parser.add_argument( + "--rootdir", default=rootdir, help="root directory to examine") + + default_boilerplate_dir = os.path.join(rootdir, "verify/boilerplate") + parser.add_argument("--boilerplate-dir", default=default_boilerplate_dir) + + parser.add_argument( + '--skip', + default=[ + 'external/bazel_tools', + '.git', + 'node_modules', + '_output', + 'third_party', + 'vendor', + 'verify/boilerplate/test', + ], + action='append', + help='Customize paths to avoid', + ) + return parser.parse_args() + + +def get_refs(): + refs = {} + + template_dir = ARGS.boilerplate_dir + if not os.path.isdir(template_dir): + template_dir = os.path.dirname(template_dir) + for path in glob.glob(os.path.join(template_dir, "boilerplate.*.txt")): + extension = os.path.basename(path).split(".")[1] + + # Pass the encoding parameter to avoid ascii decode error for some + # platform. 
+ ref_file = open(path, 'r', encoding='utf-8') + ref = ref_file.read().splitlines() + ref_file.close() + refs[extension] = ref + + return refs + + +GENERATED_GO_MARKERS = [ + "// Code generated by client-gen. DO NOT EDIT.", + "// Code generated by counterfeiter. DO NOT EDIT.", + "// Code generated by deepcopy-gen. DO NOT EDIT.", + "// Code generated by informer-gen. DO NOT EDIT.", + "// Code generated by lister-gen. DO NOT EDIT.", + "// Code generated by protoc-gen-go. DO NOT EDIT.", +] + +# given the file contents, return true if the file appears to be generated + + +def is_generated(data): + for marker in GENERATED_GO_MARKERS: + if marker in data: + return True + return False + + +def file_passes(filename, refs, regexs): # pylint: disable=too-many-locals + try: + # Pass the encoding parameter to avoid ascii decode error for some + # platform. + with open(filename, 'r', encoding='utf-8') as fp: + data = fp.read() + except IOError: + return False + + if not data: + return True # Nothing to copyright in this empty file. + + basename = os.path.basename(filename) + extension = file_extension(filename) + if extension != "": + ref = refs[extension] + else: + ref = refs[basename] + + # check for and skip generated files + if is_generated(data): + return True + + # remove build tags from the top of Go files + if extension == "go": + con = regexs["go_build_constraints"] + (data, found) = con.subn("", data, 1) + + # remove shebang from the top of shell files + if extension in ("sh", "py"): + she = regexs["shebang"] + (data, found) = she.subn("", data, 1) + + data = data.splitlines() + + # if our test file is smaller than the reference it surely fails! + if len(ref) > len(data): + return False + + # trim our file to the same number of lines as the reference file + data = data[:len(ref)] + + year = regexs["year"] + for datum in data: + if year.search(datum): + return False + + # Replace all occurrences of the regex "2017|2016|2015|2014" with "YEAR" + when = regexs["date"] + for idx, datum in enumerate(data): + (data[idx], found) = when.subn('YEAR', datum) + if found != 0: + break + + # if we don't match the reference at this point, fail + if ref != data: + return False + + return True + + +def file_extension(filename): + return os.path.splitext(filename)[1].split(".")[-1].lower() + + +# even when generated by bazel we will complain about some generated files +# not having the headers. since they're just generated, ignore them +IGNORE_HEADERS = [ + '// Code generated by go-bindata.' +] + + +def has_ignored_header(pathname): + # Pass the encoding parameter to avoid ascii decode error for some + # platform. + with open(pathname, 'r', encoding='utf-8') as myfile: + data = myfile.read() + for header in IGNORE_HEADERS: + if data.startswith(header): + return True + return False + + +def normalize_files(files): + newfiles = [] + for pathname in files: + if any(x in pathname for x in ARGS.skip): + continue + newfiles.append(pathname) + for idx, pathname in enumerate(newfiles): + if not os.path.isabs(pathname): + newfiles[idx] = os.path.join(ARGS.rootdir, pathname) + return newfiles + + +def get_files(extensions): + files = [] + if ARGS.filenames: + files = ARGS.filenames + else: + for root, dirs, walkfiles in os.walk(ARGS.rootdir): + # don't visit certain dirs. This is just a performance improvement + # as we would prune these later in normalize_files(). 
But doing it + # cuts down the amount of filesystem walking we do and cuts down + # the size of the file list + for dpath in ARGS.skip: + if dpath in dirs: + dirs.remove(dpath) + + for name in walkfiles: + pathname = os.path.join(root, name) + files.append(pathname) + + files = normalize_files(files) + outfiles = [] + for pathname in files: + basename = os.path.basename(pathname) + extension = file_extension(pathname) + if extension in extensions or basename in extensions: + if not has_ignored_header(pathname): + outfiles.append(pathname) + return outfiles + + +def get_dates(): + years = datetime.datetime.now().year + return '(%s)' % '|'.join((str(year) for year in range(2014, years + 1))) + + +def get_regexs(): + regexs = {} + # Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing + regexs["year"] = re.compile('YEAR') + # dates can be any year between 2014 and the current year, company holder names can be anything + regexs["date"] = re.compile(get_dates()) + # strip // +build \n\n build constraints + regexs["go_build_constraints"] = re.compile( + r"^(// \+build.*\n)+\n", re.MULTILINE) + # strip #!.* from shell/python scripts + regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE) + return regexs + + +def nonconforming_lines(files): + yield '%d files have incorrect boilerplate headers:' % len(files) + for fp in files: + yield os.path.relpath(fp, ARGS.rootdir) + + +def main(): + regexs = get_regexs() + refs = get_refs() + filenames = get_files(refs.keys()) + nonconforming_files = [] + for filename in sorted(filenames): + if not file_passes(filename, refs, regexs): + nonconforming_files.append(filename) + + if nonconforming_files: + for line in nonconforming_lines(nonconforming_files): + print(line) + sys.exit(1) + + +if __name__ == "__main__": + ARGS = get_args() + main() diff --git a/hack/verify_boilerplate_test.py b/hack/verify_boilerplate_test.py new file mode 100644 index 0000000000..80cada520a --- /dev/null +++ b/hack/verify_boilerplate_test.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os +import sys +import unittest + +import verify_boilerplate + +class TestBoilerplate(unittest.TestCase): + + def setUp(self): + self.old_cwd = os.getcwd() + if os.getenv('TEST_WORKSPACE'): # Running in bazel + os.chdir('verify/boilerplate') + os.chdir('test/') + self.old_out = sys.stdout + sys.stdout = io.StringIO() + + def tearDown(self): + sys.stdout = self.old_out + os.chdir(self.old_cwd) + + def test_boilerplate(self): + + class Args(object): + def __init__(self): + self.filenames = [] + self.rootdir = '.' 
+ self.boilerplate_dir = '../' + self.skip = [] + self.verbose = True + + verify_boilerplate.ARGS = Args() + with self.assertRaises(SystemExit): + verify_boilerplate.main() + + output = sys.stdout.getvalue() + expected = '\n'.join(verify_boilerplate.nonconforming_lines([ + './fail.go', + './fail.py', + ])) + '\n' # add trailing newline + + self.assertEquals(output, expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/load.bzl b/load.bzl new file mode 100644 index 0000000000..d85bfa0a36 --- /dev/null +++ b/load.bzl @@ -0,0 +1,115 @@ +# Copyright 2019 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +def repositories(): + if not native.existing_rule("subpar"): + http_archive( + name = "subpar", + urls = ["https://github.com/google/subpar/archive/2.0.0.tar.gz"], + sha256 = "b80297a1b8d38027a86836dbadc22f55dc3ecad56728175381aa6330705ac10f", + strip_prefix = "subpar-2.0.0", + ) + + # https://github.com/bazelbuild/bazel-skylib/releases + if not native.existing_rule("bazel_skylib"): + http_archive( + name = "bazel_skylib", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz", + ], + sha256 = "97e70364e9249702246c0e9444bccdc4b847bed1eb03c5a3ece4f83dfe6abc44", + ) + + # https://github.com/bazelbuild/bazel-toolchains/releases + if not native.existing_rule("bazel_toolchains"): + http_archive( + name = "bazel_toolchains", + sha256 = "a802b753e127a6f73f3f300db5dd83fb618cd798bc880b6a87db9a8777b7939f", + strip_prefix = "bazel-toolchains-3.3.0", + urls = [ + "https://github.com/bazelbuild/bazel-toolchains/releases/download/3.3.0/bazel-toolchains-3.3.0.tar.gz", + "https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/3.3.0.tar.gz", + ], + ) + + if not native.existing_rule("com_google_protobuf"): + http_archive( + name = "com_google_protobuf", + sha256 = "a79d19dcdf9139fa4b81206e318e33d245c4c9da1ffed21c87288ed4380426f9", + strip_prefix = "protobuf-3.11.4", + urls = [ + "https://mirror.bazel.build/github.com/protocolbuffers/protobuf/archive/v3.11.4.tar.gz", + "https://github.com/protocolbuffers/protobuf/archive/v3.11.4.tar.gz", + ], + ) + + # Check https://github.com/bazelbuild/rules_go/releases for new releases + # 0.22.6 supports Golang 1.14.4 and 1.13.12 + if not native.existing_rule("io_bazel_rules_go"): + http_archive( + name = "io_bazel_rules_go", + sha256 = "e0d2e3d92ef8b3704f26ac19231ef9aba66c8a3bdec4aca91a22ad7d6e6f3ef7", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.22.6/rules_go-v0.22.6.tar.gz", + "https://github.com/bazelbuild/rules_go/releases/download/v0.22.6/rules_go-v0.22.6.tar.gz", + ], + ) + + # https://github.com/bazelbuild/bazel-gazelle#running-gazelle-with-bazel + # v0.21 needs rules_go 0.23 + if not native.existing_rule("bazel_gazelle"): + 
http_archive( + name = "bazel_gazelle", + #sha256 = "bfd86b3cbe855d6c16c6fce60d76bd51f5c8dbc9cfcaef7a2bb5c1aafd0710e8", + sha256 = "d8c45ee70ec39a57e7a05e5027c32b1576cc7f16d9dd37135b0eddde45cf1b10", + urls = [ + "https://storage.googleapis.com/bazel-mirror/github.com/bazelbuild/bazel-gazelle/releases/download/v0.20.0/bazel-gazelle-v0.20.0.tar.gz", + "https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.20.0/bazel-gazelle-v0.20.0.tar.gz", + ], + ) + + # https://github.com/bazelbuild/rules_proto#getting-started + if not native.existing_rule("rules_proto"): + http_archive( + name = "rules_proto", + sha256 = "602e7161d9195e50246177e7c55b2f39950a9cf7366f74ed5f22fd45750cd208", + strip_prefix = "rules_proto-97d8af4dc474595af3900dd85cb3a29ad28cc313", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz", + "https://github.com/bazelbuild/rules_proto/archive/97d8af4dc474595af3900dd85cb3a29ad28cc313.tar.gz", + ], + ) + + # https://github.com/bazelbuild/buildtools/releases + # TODO(fejta): kazel needs a fix for 3.0.0 + if not native.existing_rule("com_github_bazelbuild_buildtools"): + http_archive( + name = "com_github_bazelbuild_buildtools", + sha256 = "7e9603607769f48e67dad0b04c1311484fc437a989405acc8462f3aa68e50eb0", + strip_prefix = "buildtools-2.2.1", + urls = [ + "https://github.com/bazelbuild/buildtools/archive/2.2.1.tar.gz", + ], + ) + + # https://github.com/bazelbuild/rules_nodejs/releases + if not native.existing_rule("bazel_build_rules_nodejs"): + http_archive( + name = "build_bazel_rules_nodejs", + sha256 = "d14076339deb08e5460c221fae5c5e9605d2ef4848eee1f0c81c9ffdc1ab31c1", + urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/1.6.1/rules_nodejs-1.6.1.tar.gz"], + ) diff --git a/presubmit.sh b/presubmit.sh new file mode 100755 index 0000000000..077efe2b01 --- /dev/null +++ b/presubmit.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# Copyright 2019 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o nounset +set -o errexit +set -o pipefail + +cd "$(git rev-parse --show-toplevel)" + +if [[ -n "${GOOGLE_APPLICATION_CREDENTIALS:-}" ]]; then + echo "Service account detected. Adding --config=ci to bazel commands" >&2 + mkdir -p "$HOME" + touch "$HOME/.bazelrc" + echo "build --config=ci" >> "$HOME/.bazelrc" +fi +( + set -o xtrace + bazel test //... # This also builds everything + ./verify/verify-boilerplate.sh --rootdir="$(pwd)" -v # TODO(fejta) migrate to bazel +) diff --git a/repos.bzl b/repos.bzl new file mode 100644 index 0000000000..5f410b489e --- /dev/null +++ b/repos.bzl @@ -0,0 +1,2004 @@ +# Copyright 2019 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Configures repositories required by repo-infra.""" + +load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository") +load("@bazel_skylib//lib:versions.bzl", "versions") +load("@bazel_toolchains//rules:rbe_repo.bzl", "rbe_autoconfig") +load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") +load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies") + +def configure(minimum_bazel_version = None, rbe_name = "rbe_default", go_version = None, nogo = None): + if minimum_bazel_version: # Allow an additional downstream constraint + versions.check(minimum_bazel_version = minimum_bazel_version) + versions.check(minimum_bazel_version = "2.2.0") # Minimum rules for this repo + if rbe_name: + rbe_autoconfig(name = rbe_name) + protobuf_deps() # No options + + go_rules_dependencies() # No options + go_register_toolchains(go_version = go_version, nogo = nogo) + + gazelle_dependencies() # TODO(fejta): go_sdk and go_repository_default_cache + +def repo_infra_go_repositories(): + go_repositories() + repo_infra_patches() + +def repo_infra_patches(): + # These require custom edits, please maintain + + go_repository( + name = "com_github_golang_protobuf", + build_file_generation = "on", + build_file_proto_mode = "disable_global", # Avoid import cyle + importpath = "github.com/golang/protobuf", + sum = "h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I=", + version = "v1.3.3", + ) + go_repository( + name = "org_golang_x_tools", # Must keep in sync with rules_go version + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/tools", + patch_args = ["-p1"], + patches = [ + "@io_bazel_rules_go//third_party:org_golang_x_tools-extras.patch", # Add go_tool_library targets + ], + sum = "h1:zE128a8BUJqwFqwi8LxUnOdV3eSOGIzDhiIV/QW8eXc=", + version = "v0.0.0-20200221191710-57f3fb51f507", + ) + +def go_repositories(): + """Packages used by go.mod, created by @io_k8s_repo_infra//hack:update-bazel.""" + go_repository( + name = "cc_mvdan_interfacer", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "mvdan.cc/interfacer", + sum = "h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=", + version = "v0.0.0-20180901003855-c20040233aed", + ) + go_repository( + name = "cc_mvdan_lint", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "mvdan.cc/lint", + sum = "h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=", + version = "v0.0.0-20170908181259-adc824a0674b", + ) + go_repository( + name = "cc_mvdan_unparam", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "mvdan.cc/unparam", + sum = "h1:Cq7MalBHYACRd6EesksG1Q8EoIAKOsiZviGKbOLIej4=", + version = "v0.0.0-20190720180237-d51796306d8f", + ) + go_repository( + name = "com_github_bazelbuild_bazel_gazelle", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/bazelbuild/bazel-gazelle", + sum = "h1:kRymV9q+24Mbeg25fJehw+gvrtVIlwZZAefOSUq4MzU=", + version = "v0.20.0", + ) + go_repository( + name = "com_github_bazelbuild_buildtools", + 
build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/bazelbuild/buildtools", + sum = "h1:F2UHxHXipTXxTmIKALHwAdNsvTPhSkgshcTNCMPJj1M=", + version = "v0.0.0-20200228172928-c9d9e342afdb", + ) + go_repository( + name = "com_github_burntsushi_toml", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/BurntSushi/toml", + sum = "h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=", + version = "v0.3.1", + ) + go_repository( + name = "com_github_davecgh_go_spew", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/davecgh/go-spew", + sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=", + version = "v1.1.1", + ) + go_repository( + name = "com_github_fatih_color", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/fatih/color", + sum = "h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=", + version = "v1.7.0", + ) + go_repository( + name = "com_github_fsnotify_fsnotify", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/fsnotify/fsnotify", + sum = "h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=", + version = "v1.4.7", + ) + go_repository( + name = "com_github_go_critic_go_critic", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-critic/go-critic", + sum = "h1:4DTQfT1wWwLg/hzxwD9bkdhDQrdJtxe6DUTadPlrIeE=", + version = "v0.4.1", + ) + go_repository( + name = "com_github_go_lintpack_lintpack", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-lintpack/lintpack", + sum = "h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0=", + version = "v0.5.2", + ) + go_repository( + name = "com_github_go_ole_go_ole", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-ole/go-ole", + sum = "h1:2lOsA72HgjxAuMlKpFiCbHTvu44PIVkZ5hqm3RSdI/E=", + version = "v1.2.1", + ) + go_repository( + name = "com_github_go_toolsmith_astcast", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/astcast", + sum = "h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_astcopy", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/astcopy", + sum = "h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_astequal", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/astequal", + sum = "h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_astfmt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/astfmt", + sum = "h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_astinfo", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/astinfo", + sum = "h1:wP6mXeB2V/d1P1K7bZ5vDUO3YqEzcvOREOxZPEu3gVI=", + version = "v0.0.0-20180906194353-9809ff7efb21", + ) + go_repository( + name = "com_github_go_toolsmith_astp", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/astp", + 
sum = "h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_pkgload", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/pkgload", + sum = "h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_strparse", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/strparse", + sum = "h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_toolsmith_typep", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-toolsmith/typep", + sum = "h1:zKymWyA1TRYvqYrYDrfEMZULyrhcnGY3x7LDKU2XQaA=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_gobwas_glob", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gobwas/glob", + sum = "h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=", + version = "v0.2.3", + ) + go_repository( + name = "com_github_gogo_protobuf", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gogo/protobuf", + sum = "h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE=", + version = "v1.2.1", + ) + go_repository( + name = "com_github_golang_mock", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golang/mock", + sum = "h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s=", + version = "v1.3.1", + ) + go_repository( + name = "com_github_golang_protobuf", + build_file_generation = "on", + build_file_proto_mode = "disable_global", # Avoid import cycle + importpath = "github.com/golang/protobuf", + sum = "h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I=", + version = "v1.3.3", + ) + go_repository( + name = "com_github_golangci_check", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/check", + sum = "h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0=", + version = "v0.0.0-20180506172741-cfe4005ccda2", + ) + go_repository( + name = "com_github_golangci_dupl", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/dupl", + sum = "h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=", + version = "v0.0.0-20180902072040-3e9179ac440a", + ) + go_repository( + name = "com_github_golangci_errcheck", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/errcheck", + sum = "h1:YYWNAGTKWhKpcLLt7aSj/odlKrSrelQwlovBpDuf19w=", + version = "v0.0.0-20181223084120-ef45e06d44b6", + ) + go_repository( + name = "com_github_golangci_go_misc", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/go-misc", + sum = "h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw=", + version = "v0.0.0-20180628070357-927a3d87b613", + ) + + go_repository( + name = "com_github_golangci_goconst", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/goconst", + sum = "h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8=", + version = "v0.0.0-20180610141641-041c5f2b40f3", + ) + go_repository( + name = "com_github_golangci_gocyclo", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/gocyclo", + sum = "h1:J2XAy40+7yz70uaOiMbNnluTg7gyQhtGqLQncQh+4J8=", 
+ version = "v0.0.0-20180528134321-2becd97e67ee", + ) + go_repository( + name = "com_github_golangci_gofmt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/gofmt", + sum = "h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks=", + version = "v0.0.0-20190930125516-244bba706f1a", + ) + go_repository( + name = "com_github_golangci_golangci_lint", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/golangci-lint", + sum = "h1:fwVdXtCBBCmk9e/7bTjkeCMx52bhq1IqmEQOVDbHXcg=", + version = "v1.25.0", + ) + + go_repository( + name = "com_github_golangci_ineffassign", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/ineffassign", + sum = "h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI=", + version = "v0.0.0-20190609212857-42439a7714cc", + ) + go_repository( + name = "com_github_golangci_lint_1", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/lint-1", + sum = "h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA=", + version = "v0.0.0-20191013205115-297bf364a8e0", + ) + go_repository( + name = "com_github_golangci_maligned", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/maligned", + sum = "h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=", + version = "v0.0.0-20180506175553-b1d89398deca", + ) + go_repository( + name = "com_github_golangci_misspell", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/misspell", + sum = "h1:EL/O5HGrF7Jaq0yNhBLucz9hTuRzj2LdwGBOaENgxIk=", + version = "v0.0.0-20180809174111-950f5d19e770", + ) + go_repository( + name = "com_github_golangci_prealloc", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/prealloc", + sum = "h1:leSNB7iYzLYSSx3J/s5sVf4Drkc68W2wm4Ixh/mr0us=", + version = "v0.0.0-20180630174525-215b22d4de21", + ) + go_repository( + name = "com_github_golangci_revgrep", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/revgrep", + sum = "h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg=", + version = "v0.0.0-20180526074752-d9c87f5ffaf0", + ) + go_repository( + name = "com_github_golangci_unconvert", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golangci/unconvert", + sum = "h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=", + version = "v0.0.0-20180507085042-28b1c447d1f4", + ) + go_repository( + name = "com_github_google_go_cmp", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/go-cmp", + sum = "h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=", + version = "v0.4.0", + ) + go_repository( + name = "com_github_gostaticanalysis_analysisutil", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gostaticanalysis/analysisutil", + sum = "h1:JVnpOZS+qxli+rgVl98ILOXVNbW+kb5wcxeGx8ShUIw=", + version = "v0.0.0-20190318220348-4088753ea4d3", + ) + go_repository( + name = "com_github_hashicorp_hcl", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/hashicorp/hcl", + sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_hpcloud_tail", + build_file_generation = "on", + build_file_proto_mode = "disable", + 
importpath = "github.com/hpcloud/tail", + sum = "h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_inconshreveable_mousetrap", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/inconshreveable/mousetrap", + sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_kisielk_gotool", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/kisielk/gotool", + sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_klauspost_compress", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/klauspost/compress", + sum = "h1:8VMb5+0wMgdBykOV96DwNwKFQ+WTI4pzYURP99CcB9E=", + version = "v1.4.1", + ) + go_repository( + name = "com_github_klauspost_cpuid", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/klauspost/cpuid", + sum = "h1:NMpwD2G9JSFOE1/TJjGSo5zG7Yb2bTe7eq1jH+irmeE=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_kr_pretty", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/kr/pretty", + sum = "h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=", + version = "v0.1.0", + ) + go_repository( + name = "com_github_kr_pty", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/kr/pty", + sum = "h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI=", + version = "v1.1.8", + ) + go_repository( + name = "com_github_kr_text", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/kr/text", + sum = "h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=", + version = "v0.2.0", + ) + go_repository( + name = "com_github_logrusorgru_aurora", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/logrusorgru/aurora", + sum = "h1:9MlwzLdW7QSDrhDjFlsEYmxpFyIoXmYRon3dt0io31k=", + version = "v0.0.0-20181002194514-a7b3b318ed4e", + ) + go_repository( + name = "com_github_magiconair_properties", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/magiconair/properties", + sum = "h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=", + version = "v1.8.1", + ) + go_repository( + name = "com_github_mattn_go_colorable", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mattn/go-colorable", + sum = "h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=", + version = "v0.1.4", + ) + go_repository( + name = "com_github_mattn_go_isatty", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mattn/go-isatty", + sum = "h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE=", + version = "v0.0.8", + ) + go_repository( + name = "com_github_mattn_goveralls", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mattn/goveralls", + sum = "h1:7eJB6EqsPhRVxvwEXGnqdO2sJI0PTsrWoTMXEk9/OQc=", + version = "v0.0.2", + ) + go_repository( + name = "com_github_mitchellh_go_homedir", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mitchellh/go-homedir", + sum = "h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=", + version = "v1.1.0", + ) + go_repository( + name = "com_github_mitchellh_go_ps", + 
build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mitchellh/go-ps", + sum = "h1:9+ke9YJ9KGWw5ANXK6ozjoK47uI3uNbXv4YVINBnGm8=", + version = "v0.0.0-20190716172923-621e5597135b", + ) + go_repository( + name = "com_github_mitchellh_mapstructure", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mitchellh/mapstructure", + sum = "h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=", + version = "v1.1.2", + ) + go_repository( + name = "com_github_mozilla_tls_observatory", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mozilla/tls-observatory", + sum = "h1:Av0AX0PnAlPZ3AY2rQUobGFaZfE4KHVRdKWIEPvsCWY=", + version = "v0.0.0-20190404164649-a3c1b6cfecfd", + ) + go_repository( + name = "com_github_nbutton23_zxcvbn_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/nbutton23/zxcvbn-go", + sum = "h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=", + version = "v0.0.0-20180912185939-ae427f1e4c1d", + ) + go_repository( + name = "com_github_onsi_ginkgo", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/onsi/ginkgo", + sum = "h1:JAKSXpt1YjtLA7YpPiqO9ss6sNXEsPfSGdwN0UHqzrw=", + version = "v1.11.0", + ) + go_repository( + name = "com_github_onsi_gomega", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/onsi/gomega", + sum = "h1:C5Dqfs/LeauYDX0jJXIe2SWmwCbGzx9yF8C8xy3Lh34=", + version = "v1.8.1", + ) + go_repository( + name = "com_github_openpeedeep_depguard", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/OpenPeeDeeP/depguard", + sum = "h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us=", + version = "v1.0.1", + ) + go_repository( + name = "com_github_pelletier_go_toml", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/pelletier/go-toml", + sum = "h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_pkg_errors", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/pkg/errors", + sum = "h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=", + version = "v0.8.1", + ) + go_repository( + name = "com_github_pmezard_go_difflib", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/pmezard/go-difflib", + sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_quasilyte_go_consistent", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/quasilyte/go-consistent", + sum = "h1:JoUA0uz9U0FVFq5p4LjEq4C0VgQ0El320s3Ms0V4eww=", + version = "v0.0.0-20190521200055-c6f3937de18c", + ) + go_repository( + name = "com_github_rogpeppe_go_internal", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/rogpeppe/go-internal", + sum = "h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk=", + version = "v1.3.0", + ) + + go_repository( + name = "com_github_shirou_gopsutil", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/shirou/gopsutil", + sum = "h1:WokF3GuxBeL+n4Lk4Fa8v9mbdjlrl7bHuneF4N1bk2I=", + version = "v0.0.0-20190901111213-e4ec7b275ada", + ) + go_repository( + name = "com_github_shirou_w32", + build_file_generation = "on", + 
build_file_proto_mode = "disable", + importpath = "github.com/shirou/w32", + sum = "h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=", + version = "v0.0.0-20160930032740-bb4de0191aa4", + ) + go_repository( + name = "com_github_shurcool_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/shurcooL/go", + sum = "h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM=", + version = "v0.0.0-20180423040247-9e1955d9fb6e", + ) + go_repository( + name = "com_github_shurcool_go_goon", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/shurcooL/go-goon", + sum = "h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc=", + version = "v0.0.0-20170922171312-37c2f522c041", + ) + go_repository( + name = "com_github_sirupsen_logrus", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/sirupsen/logrus", + sum = "h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=", + version = "v1.4.2", + ) + go_repository( + name = "com_github_sourcegraph_go_diff", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/sourcegraph/go-diff", + sum = "h1:gO6i5zugwzo1RVTvgvfwCOSVegNuvnNi6bAD1QCmkHs=", + version = "v0.5.1", + ) + go_repository( + name = "com_github_spf13_afero", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spf13/afero", + sum = "h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=", + version = "v1.1.2", + ) + go_repository( + name = "com_github_spf13_cast", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spf13/cast", + sum = "h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=", + version = "v1.3.0", + ) + go_repository( + name = "com_github_spf13_cobra", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spf13/cobra", + sum = "h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=", + version = "v0.0.5", + ) + go_repository( + name = "com_github_spf13_jwalterweatherman", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spf13/jwalterweatherman", + sum = "h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_spf13_pflag", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spf13/pflag", + sum = "h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=", + version = "v1.0.5", + ) + go_repository( + name = "com_github_spf13_viper", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spf13/viper", + sum = "h1:VPZzIkznI1YhVMRi6vNFLHSwhnhReBfgTxIPccpfdZk=", + version = "v1.6.1", + ) + go_repository( + name = "com_github_stackexchange_wmi", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/StackExchange/wmi", + sum = "h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=", + version = "v0.0.0-20180116203802-5d049714c4a6", + ) + go_repository( + name = "com_github_stretchr_testify", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/stretchr/testify", + sum = "h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=", + version = "v1.5.1", + ) + go_repository( + name = "com_github_timakin_bodyclose", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/timakin/bodyclose", + sum = "h1:RumXZ56IrCj4CL+g1b9OL/oH0QnsF976bC8xQFYUD5Q=", + 
version = "v0.0.0-20190930140734-f7f2e9bca95e", + ) + go_repository( + name = "com_github_ultraware_funlen", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ultraware/funlen", + sum = "h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo=", + version = "v0.0.2", + ) + go_repository( + name = "com_github_valyala_bytebufferpool", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/valyala/bytebufferpool", + sum = "h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_valyala_fasthttp", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/valyala/fasthttp", + sum = "h1:dzZJf2IuMiclVjdw0kkT+f9u4YdrapbNyGAN47E/qnk=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_valyala_quicktemplate", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/valyala/quicktemplate", + sum = "h1:BaO1nHTkspYzmAjPXj0QiDJxai96tlcZyKcI9dyEGvM=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_valyala_tcplisten", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/valyala/tcplisten", + sum = "h1:0R4NLDRDZX6JcmhJgXi5E4b8Wg84ihbmUKp/GvSPEzc=", + version = "v0.0.0-20161114210144-ceec8f93295a", + ) + go_repository( + name = "com_sourcegraph_sqs_pbtypes", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "sourcegraph.com/sqs/pbtypes", + sum = "h1:JPJh2pk3+X4lXAkZIk2RuE/7/FoK9maXw+TNPJhVS/c=", + version = "v0.0.0-20180604144634-d3ebe8f20ae4", + ) + + go_repository( + name = "in_gopkg_check_v1", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/check.v1", + sum = "h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=", + version = "v1.0.0-20200227125254-8fa46927fb4f", + ) + go_repository( + name = "in_gopkg_errgo_v2", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/errgo.v2", + sum = "h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=", + version = "v2.1.0", + ) + go_repository( + name = "in_gopkg_fsnotify_v1", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/fsnotify.v1", + sum = "h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=", + version = "v1.4.7", + ) + + go_repository( + name = "in_gopkg_tomb_v1", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/tomb.v1", + sum = "h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=", + version = "v1.0.0-20141024135613-dd632973f1e7", + ) + go_repository( + name = "in_gopkg_yaml_v2", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/yaml.v2", + sum = "h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=", + version = "v2.2.8", + ) + go_repository( + name = "io_k8s_klog_v2", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "k8s.io/klog/v2", + sum = "h1:Foj74zO6RbjjP4hBEKjnYtjjAhGg4jNynUdYF6fJrok=", + version = "v2.0.0", + ) + go_repository( + name = "org_golang_x_build", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/build", + sum = "h1:aJVl+xDmB1VkCrLev/VX9jah/wJX/I58lUclpeX5zBY=", + version = "v0.0.0-20200302185339-bb8466fe872a", + ) + go_repository( + name = "org_golang_x_crypto", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = 
"golang.org/x/crypto", + sum = "h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8=", + version = "v0.0.0-20191011191535-87dc89f01550", + ) + go_repository( + name = "org_golang_x_net", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/net", + sum = "h1:0mm1VjtFUOIlE1SbDlwjYaDxZVDP2S5ou6y0gSgXHu8=", + version = "v0.0.0-20200226121028-0de0cce0169b", + ) + go_repository( + name = "org_golang_x_sync", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/sync", + sum = "h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY=", + version = "v0.0.0-20190911185100-cd5d95a43a6e", + ) + go_repository( + name = "org_golang_x_sys", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/sys", + sum = "h1:LfCXLvNmTYH9kEmVgqbnsWfruoXZIrh4YBgqVHtDvw0=", + version = "v0.0.0-20200202164722-d101bd2416d5", + ) + go_repository( + name = "org_golang_x_text", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/text", + sum = "h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=", + version = "v0.3.2", + ) + go_repository( + name = "org_golang_x_tools", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/tools", + patch_args = ["-p1"], + patches = [ + "@io_bazel_rules_go//third_party:org_golang_x_tools-extras.patch", # Add go_tool_library targets + ], + sum = "h1:3Dzrrxi54Io7Aoyb0PYLsI47K2TxkRQg+cqUn+m04do=", + version = "v0.0.0-20200422022333-3d57cf2e726e", + ) + go_repository( + name = "org_golang_x_xerrors", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/xerrors", + sum = "h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=", + version = "v0.0.0-20191204190536-9bdfabe68543", + ) + go_repository( + name = "com_github_armon_consul_api", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/armon/consul-api", + sum = "h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=", + version = "v0.0.0-20180202201655-eb2c6b5be1b6", + ) + go_repository( + name = "com_github_bazelbuild_rules_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/bazelbuild/rules_go", + sum = "h1:wzbawlkLtl2ze9w/312NHZ84c7kpUCtlkD8HgFY27sw=", + version = "v0.0.0-20190719190356-6dae44dc5cab", + ) + go_repository( + name = "com_github_coreos_etcd", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/coreos/etcd", + sum = "h1:jFneRYjIvLMLhDLCzuTuU4rSJUjRplcJQ7pD7MnhC04=", + version = "v3.3.10+incompatible", + ) + go_repository( + name = "com_github_coreos_go_etcd", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/coreos/go-etcd", + sum = "h1:bXhRBIXoTm9BYHS3gE0TtQuyNZyeEMux2sDi4oo5YOo=", + version = "v2.0.0+incompatible", + ) + go_repository( + name = "com_github_coreos_go_semver", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/coreos/go-semver", + sum = "h1:3Jm3tLmsgAYcjC+4Up7hJrFBPr+n7rAqYeSw/SZazuY=", + version = "v0.2.0", + ) + go_repository( + name = "com_github_cpuguy83_go_md2man", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/cpuguy83/go-md2man", + sum = "h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=", + version = "v1.0.10", + ) + go_repository( + name = "com_github_russross_blackfriday", + build_file_generation = 
"on", + build_file_proto_mode = "disable", + importpath = "github.com/russross/blackfriday", + sum = "h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=", + version = "v1.5.2", + ) + go_repository( + name = "com_github_ugorji_go_codec", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ugorji/go/codec", + sum = "h1:3SVOIvH7Ae1KRYyQWRjXWJEA9sS/c/pjvH++55Gr648=", + version = "v0.0.0-20181204163529-d75b2dcb6bc8", + ) + go_repository( + name = "com_github_xordataexchange_crypt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/xordataexchange/crypt", + sum = "h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=", + version = "v0.0.3-0.20170626215501-b2862e3d0a77", + ) + go_repository( + name = "com_github_go_logr_logr", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-logr/logr", + sum = "h1:M1Tv3VzNlEHg6uyACnRdtrploV2P7wZqH8BoQMtz0cg=", + version = "v0.1.0", + ) + go_repository( + name = "co_honnef_go_tools", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "honnef.co/go/tools", + sum = "h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=", + version = "v0.0.1-2019.2.3", + ) + go_repository( + name = "com_github_anmitsu_go_shlex", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/anmitsu/go-shlex", + sum = "h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=", + version = "v0.0.0-20161002113705-648efa622239", + ) + go_repository( + name = "com_github_bradfitz_go_smtpd", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/bradfitz/go-smtpd", + sum = "h1:ckJgFhFWywOx+YLEMIJsTb+NV6NexWICk5+AMSuz3ss=", + version = "v0.0.0-20170404230938-deb6d6237625", + ) + go_repository( + name = "com_github_client9_misspell", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/client9/misspell", + sum = "h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=", + version = "v0.3.4", + ) + go_repository( + name = "com_github_coreos_go_systemd", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/coreos/go-systemd", + sum = "h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=", + version = "v0.0.0-20190321100706-95778dfbb74e", + ) + go_repository( + name = "com_github_flynn_go_shlex", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/flynn/go-shlex", + sum = "h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=", + version = "v0.0.0-20150515145356-3f9db97f8568", + ) + go_repository( + name = "com_github_gliderlabs_ssh", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gliderlabs/ssh", + sum = "h1:j3L6gSLQalDETeEg/Jg0mGY0/y/N6zI2xX1978P0Uqw=", + version = "v0.1.1", + ) + go_repository( + name = "com_github_golang_glog", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golang/glog", + sum = "h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=", + version = "v0.0.0-20160126235308-23def4e6c14b", + ) + go_repository( + name = "com_github_google_btree", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/btree", + sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_google_go_github", + build_file_generation = "on", + build_file_proto_mode = "disable", 
+ importpath = "github.com/google/go-github", + sum = "h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=", + version = "v17.0.0+incompatible", + ) + go_repository( + name = "com_github_google_go_querystring", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/go-querystring", + sum = "h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_google_martian", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/martian", + sum = "h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=", + version = "v2.1.0+incompatible", + ) + go_repository( + name = "com_github_google_pprof", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/pprof", + sum = "h1:DLpL8pWq0v4JYoRpEhDfsJhhJyGKCcQM2WPW2TJs31c=", + version = "v0.0.0-20191218002539-d4f498aebedc", + ) + go_repository( + name = "com_github_googleapis_gax_go_v2", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/googleapis/gax-go/v2", + sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=", + version = "v2.0.5", + ) + go_repository( + name = "com_github_gregjones_httpcache", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gregjones/httpcache", + sum = "h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM=", + version = "v0.0.0-20180305231024-9cad4c3443a7", + ) + go_repository( + name = "com_github_hashicorp_golang_lru", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/hashicorp/golang-lru", + sum = "h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=", + version = "v0.5.1", + ) + go_repository( + name = "com_github_jellevandenhooff_dkim", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jellevandenhooff/dkim", + sum = "h1:ujPKutqRlJtcfWk6toYVYagwra7HQHbXOaS171b4Tg8=", + version = "v0.0.0-20150330215556-f50fe3d243e1", + ) + go_repository( + name = "com_github_jstemmer_go_junit_report", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jstemmer/go-junit-report", + sum = "h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=", + version = "v0.9.1", + ) + go_repository( + name = "com_github_tarm_serial", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/tarm/serial", + sum = "h1:UyzmZLoiDWMRywV4DUYb9Fbt8uiOSooupjTq10vpvnU=", + version = "v0.0.0-20180830185346-98f6abe2eb07", + ) + go_repository( + name = "com_google_cloud_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "cloud.google.com/go", + sum = "h1:GGslhk/BU052LPlnI1vpp3fcbUs+hQ3E+Doti/3/vF8=", + version = "v0.52.0", + ) + go_repository( + name = "in_gopkg_inf_v0", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/inf.v0", + sum = "h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=", + version = "v0.9.1", + ) + go_repository( + name = "io_opencensus_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "go.opencensus.io", + sum = "h1:75k/FF0Q2YM8QYo07VPddOLBslDt1MZOdEslOHvmzAs=", + version = "v0.22.2", + ) + go_repository( + name = "org_go4", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "go4.org", + sum = "h1:+hE86LblG4AyDgwMCLTE6FOlM9+qjHSYS+rKqxUVdsM=", + version = 
"v0.0.0-20180809161055-417644f6feb5", + ) + go_repository( + name = "org_go4_grpc", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "grpc.go4.org", + sum = "h1:tmXTu+dfa+d9Evp8NpJdgOy6+rt8/x4yG7qPBrtNfLY=", + version = "v0.0.0-20170609214715-11d0a25b4919", + ) + go_repository( + name = "org_golang_google_api", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "google.golang.org/api", + sum = "h1:0q95w+VuFtv4PAx4PZVQdBMmYbaCHbnfKaEiDIcVyag=", + version = "v0.17.0", + ) + go_repository( + name = "org_golang_google_appengine", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "google.golang.org/appengine", + sum = "h1:tycE03LOZYQNhDpS27tcQdAzLCVMaj7QT2SXxebnpCM=", + version = "v1.6.5", + ) + go_repository( + name = "org_golang_google_genproto", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "google.golang.org/genproto", + sum = "h1:tirixpud1WdjE3/NrL9ar4ot0ADfwls8sOcIf1ivRDw=", + version = "v0.0.0-20200207204624-4f3edf09f4f6", + ) + go_repository( + name = "org_golang_google_grpc", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "google.golang.org/grpc", + sum = "h1:zvIju4sqAGvwKspUQOhwnpcqSbzi7/H6QomNNjTL4sk=", + version = "v1.27.1", + ) + go_repository( + name = "org_golang_x_exp", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/exp", + sum = "h1:zQpM52jfKHG6II1ISZY1ZcpygvuSFZpLwfluuF89XOg=", + version = "v0.0.0-20191227195350-da58074b4299", + ) + go_repository( + name = "org_golang_x_lint", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/lint", + sum = "h1:J5lckAjkw6qYlOZNj90mLYNTEKDvWeuc1yieZ8qUzUE=", + version = "v0.0.0-20191125180803-fdd1cda4f05f", + ) + go_repository( + name = "org_golang_x_oauth2", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/oauth2", + sum = "h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw=", + version = "v0.0.0-20200107190931-bf48bf16ab8d", + ) + go_repository( + name = "org_golang_x_perf", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/perf", + sum = "h1:xYq6+9AtI+xP3M4r0N1hCkHrInHDBohhquRgx9Kk6gI=", + version = "v0.0.0-20180704124530-6e6d33e29852", + ) + go_repository( + name = "org_golang_x_time", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/time", + sum = "h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ=", + version = "v0.0.0-20190308202827-9d24e82272b4", + ) + go_repository( + name = "com_github_alecthomas_template", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/alecthomas/template", + sum = "h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=", + version = "v0.0.0-20160405071501-a0175ee3bccc", + ) + go_repository( + name = "com_github_alecthomas_units", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/alecthomas/units", + sum = "h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=", + version = "v0.0.0-20151022065526-2efee857e7cf", + ) + go_repository( + name = "com_github_beorn7_perks", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/beorn7/perks", + sum = "h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0=", + version = "v1.0.0", + ) + + go_repository( + name = "com_github_cespare_xxhash", + 
build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/cespare/xxhash", + sum = "h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=", + version = "v1.1.0", + ) + go_repository( + name = "com_github_coreos_bbolt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/coreos/bbolt", + sum = "h1:wZwiHHUieZCquLkDL0B8UhzreNWsPHooDAG3q34zk0s=", + version = "v1.3.2", + ) + go_repository( + name = "com_github_coreos_pkg", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/coreos/pkg", + sum = "h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=", + version = "v0.0.0-20180928190104-399ea9e2e55f", + ) + go_repository( + name = "com_github_creack_pty", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/creack/pty", + sum = "h1:uDmaGzcdjhF4i/plgjmEsriH11Y0o7RKapEf/LDaM3w=", + version = "v1.1.9", + ) + go_repository( + name = "com_github_dgrijalva_jwt_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/dgrijalva/jwt-go", + sum = "h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=", + version = "v3.2.0+incompatible", + ) + go_repository( + name = "com_github_dgryski_go_sip13", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/dgryski/go-sip13", + sum = "h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=", + version = "v0.0.0-20181026042036-e10d5fee7954", + ) + go_repository( + name = "com_github_ghodss_yaml", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ghodss/yaml", + sum = "h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_go_kit_kit", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-kit/kit", + sum = "h1:Wz+5lgoB0kkuqLEc6NVmwRknTKP6dTGbSqvhZtBI/j0=", + version = "v0.8.0", + ) + go_repository( + name = "com_github_go_logfmt_logfmt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-logfmt/logfmt", + sum = "h1:MP4Eh7ZCb31lleYCFuwm0oe4/YGak+5l1vA2NOE80nA=", + version = "v0.4.0", + ) + go_repository( + name = "com_github_go_stack_stack", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-stack/stack", + sum = "h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=", + version = "v1.8.0", + ) + go_repository( + name = "com_github_gofrs_flock", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gofrs/flock", + sum = "h1:ekuhfTjngPhisSjOJ0QWKpPQE8/rbknHaes6WVJj5Hw=", + version = "v0.0.0-20190320160742-5135e617513b", + ) + go_repository( + name = "com_github_golang_groupcache", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/golang/groupcache", + sum = "h1:5ZkaAPbicIKTF2I64qf5Fh8Aa83Q/dnOafMYV0OMwjA=", + version = "v0.0.0-20191227052852-215e87163ea7", + ) + go_repository( + name = "com_github_google_renameio", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/renameio", + sum = "h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=", + version = "v0.1.0", + ) + go_repository( + name = "com_github_gorilla_websocket", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gorilla/websocket", + sum = 
"h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=", + version = "v1.4.0", + ) + go_repository( + name = "com_github_grpc_ecosystem_go_grpc_middleware", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/grpc-ecosystem/go-grpc-middleware", + sum = "h1:Iju5GlWwrvL6UBg4zJJt3btmonfrMlCDdsejg4CZE7c=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_grpc_ecosystem_go_grpc_prometheus", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/grpc-ecosystem/go-grpc-prometheus", + sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_grpc_ecosystem_grpc_gateway", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/grpc-ecosystem/grpc-gateway", + sum = "h1:bM6ZAFZmc/wPFaRDi0d5L7hGEZEx/2u+Tmr2evNHDiI=", + version = "v1.9.0", + ) + go_repository( + name = "com_github_jonboulle_clockwork", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jonboulle/clockwork", + sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=", + version = "v0.1.0", + ) + go_repository( + name = "com_github_julienschmidt_httprouter", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/julienschmidt/httprouter", + sum = "h1:TDTW5Yz1mjftljbcKqRcrYhd4XeOoI98t+9HbQbYf7g=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_kisielk_errcheck", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/kisielk/errcheck", + sum = "h1:ZqfnKyx9KGpRcW04j5nnPDgRgoXUeLh2YFBeFzphcA0=", + version = "v1.1.0", + ) + go_repository( + name = "com_github_konsorten_go_windows_terminal_sequences", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/konsorten/go-windows-terminal-sequences", + sum = "h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=", + version = "v1.0.1", + ) + go_repository( + name = "com_github_kr_logfmt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/kr/logfmt", + sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=", + version = "v0.0.0-20140226030751-b84e30acd515", + ) + go_repository( + name = "com_github_lib_pq", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/lib/pq", + sum = "h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_matoous_godox", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/matoous/godox", + sum = "h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE=", + version = "v0.0.0-20190911065817-5d6d842e92eb", + ) + go_repository( + name = "com_github_matttproud_golang_protobuf_extensions", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/matttproud/golang_protobuf_extensions", + sum = "h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=", + version = "v1.0.1", + ) + go_repository( + name = "com_github_mwitkow_go_conntrack", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mwitkow/go-conntrack", + sum = "h1:F9x/1yl3T2AeKLr2AMdilSD8+f9bvMnNN8VS5iDtovc=", + version = "v0.0.0-20161129095857-cc309e4a2223", + ) + go_repository( + name = "com_github_oklog_ulid", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = 
"github.com/oklog/ulid", + sum = "h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=", + version = "v1.3.1", + ) + go_repository( + name = "com_github_oneofone_xxhash", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/OneOfOne/xxhash", + sum = "h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=", + version = "v1.2.2", + ) + go_repository( + name = "com_github_prometheus_client_golang", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/prometheus/client_golang", + sum = "h1:9iH4JKXLzFbOAdtqv/a+j8aewx2Y8lAjAydhbaScPF8=", + version = "v0.9.3", + ) + go_repository( + name = "com_github_prometheus_client_model", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/prometheus/client_model", + sum = "h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM=", + version = "v0.0.0-20190812154241-14fe0d1b01d4", + ) + go_repository( + name = "com_github_prometheus_common", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/prometheus/common", + sum = "h1:7etb9YClo3a6HjLzfl6rIQaU+FDfi0VSX39io3aQ+DM=", + version = "v0.4.0", + ) + go_repository( + name = "com_github_prometheus_procfs", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/prometheus/procfs", + sum = "h1:sofwID9zm4tzrgykg80hfFph1mryUeLRsUfoocVVmRY=", + version = "v0.0.0-20190507164030-5867b95ac084", + ) + go_repository( + name = "com_github_prometheus_tsdb", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/prometheus/tsdb", + sum = "h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=", + version = "v0.7.1", + ) + go_repository( + name = "com_github_rogpeppe_fastuuid", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/rogpeppe/fastuuid", + sum = "h1:gu+uRPtBe88sKxUCEXRoeCvVG90TJmwhiqRpvdhQFng=", + version = "v0.0.0-20150106093220-6724a57986af", + ) + go_repository( + name = "com_github_securego_gosec", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/securego/gosec", + sum = "h1:AtnWoOvTioyDXFvu96MWEeE8qj4COSQnJogzLy/u41A=", + version = "v0.0.0-20200103095621-79fbf3af8d83", + ) + go_repository( + name = "com_github_soheilhy_cmux", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/soheilhy/cmux", + sum = "h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=", + version = "v0.1.4", + ) + go_repository( + name = "com_github_spaolacci_murmur3", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/spaolacci/murmur3", + sum = "h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=", + version = "v0.0.0-20180118202830-f09979ecbc72", + ) + go_repository( + name = "com_github_stretchr_objx", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/stretchr/objx", + sum = "h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=", + version = "v0.2.0", + ) + go_repository( + name = "com_github_tmc_grpc_websocket_proxy", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/tmc/grpc-websocket-proxy", + sum = "h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=", + version = "v0.0.0-20190109142713-0ad062ec5ee5", + ) + go_repository( + name = "com_github_ugorji_go", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ugorji/go", 
+ sum = "h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=", + version = "v1.1.4", + ) + go_repository( + name = "com_github_ultraware_whitespace", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ultraware/whitespace", + sum = "h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg=", + version = "v0.0.4", + ) + go_repository( + name = "com_github_uudashr_gocognit", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/uudashr/gocognit", + sum = "h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs=", + version = "v1.0.1", + ) + go_repository( + name = "com_github_xiang90_probing", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/xiang90/probing", + sum = "h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=", + version = "v0.0.0-20190116061207-43a291ad63a2", + ) + go_repository( + name = "in_gopkg_alecthomas_kingpin_v2", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/alecthomas/kingpin.v2", + sum = "h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=", + version = "v2.2.6", + ) + go_repository( + name = "in_gopkg_resty_v1", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/resty.v1", + sum = "h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=", + version = "v1.12.0", + ) + go_repository( + name = "io_etcd_go_bbolt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "go.etcd.io/bbolt", + sum = "h1:Z/90sZLPOeCy2PwprqkFa25PdkusRzaj9P8zm/KNyvk=", + version = "v1.3.2", + ) + go_repository( + name = "org_golang_x_mod", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/mod", + sum = "h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ=", + version = "v0.2.0", + ) + go_repository( + name = "org_uber_go_atomic", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "go.uber.org/atomic", + sum = "h1:cxzIVoETapQEqDhQu3QfnvXAV4AlzcvUCxkVUFw3+EU=", + version = "v1.4.0", + ) + go_repository( + name = "org_uber_go_multierr", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "go.uber.org/multierr", + sum = "h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI=", + version = "v1.1.0", + ) + go_repository( + name = "org_uber_go_zap", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "go.uber.org/zap", + sum = "h1:ORx85nbTijNz8ljznvCMR1ZBIPKFn3jQrag10X2AsuM=", + version = "v1.10.0", + ) + + go_repository( + name = "com_github_google_uuid", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/google/uuid", + sum = "h1:b4Gk+7WdP/d3HZH8EJsZpvV7EtDOgaZLtnaNGIu1adA=", + version = "v1.0.0", + ) + go_repository( + name = "com_github_gopherjs_gopherjs", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/gopherjs/gopherjs", + sum = "h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=", + version = "v0.0.0-20181017120253-0766667cb4d1", + ) + go_repository( + name = "com_github_jtolds_gls", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jtolds/gls", + sum = "h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=", + version = "v4.20.0+incompatible", + ) + go_repository( + name = "com_github_pborman_uuid", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/pborman/uuid", + sum = 
"h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_smartystreets_assertions", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/smartystreets/assertions", + sum = "h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=", + version = "v0.0.0-20180927180507-b2de0cb4f26d", + ) + go_repository( + name = "com_github_smartystreets_goconvey", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/smartystreets/goconvey", + sum = "h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=", + version = "v1.6.4", + ) + go_repository( + name = "com_github_subosito_gotenv", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/subosito/gotenv", + sum = "h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=", + version = "v1.2.0", + ) + go_repository( + name = "com_github_tommy_muehle_go_mnd", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/tommy-muehle/go-mnd", + sum = "h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As=", + version = "v1.3.1-0.20200224220436-e6f9a994e8fa", + ) + go_repository( + name = "in_gopkg_ini_v1", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "gopkg.in/ini.v1", + sum = "h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=", + version = "v1.51.0", + ) + go_repository( + name = "com_github_go_sql_driver_mysql", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-sql-driver/mysql", + sum = "h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk=", + version = "v1.4.0", + ) + go_repository( + name = "com_github_jingyugao_rowserrcheck", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jingyugao/rowserrcheck", + sum = "h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk=", + version = "v0.0.0-20191204022205-72ab7603b68a", + ) + go_repository( + name = "com_github_jirfag_go_printf_func_name", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jirfag/go-printf-func-name", + sum = "h1:jNYPNLe3d8smommaoQlK7LOA5ESyUJJ+Wf79ZtA7Vp4=", + version = "v0.0.0-20191110105641-45db9963cdd3", + ) + go_repository( + name = "com_github_jmoiron_sqlx", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/jmoiron/sqlx", + sum = "h1:lrdPtrORjGv1HbbEvKWDUAy97mPpFm4B8hp77tcCUJY=", + version = "v1.2.1-0.20190826204134-d7d95172beb5", + ) + go_repository( + name = "com_github_mattn_go_sqlite3", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/mattn/go-sqlite3", + sum = "h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4=", + version = "v1.9.0", + ) + go_repository( + name = "com_github_burntsushi_xgb", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/BurntSushi/xgb", + sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=", + version = "v0.0.0-20160522181843-27f122750802", + ) + go_repository( + name = "com_github_census_instrumentation_opencensus_proto", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/census-instrumentation/opencensus-proto", + sum = "h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk=", + version = "v0.2.1", + ) + go_repository( + name = "com_github_chzyer_logex", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = 
"github.com/chzyer/logex", + sum = "h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=", + version = "v1.1.10", + ) + go_repository( + name = "com_github_chzyer_readline", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/chzyer/readline", + sum = "h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=", + version = "v0.0.0-20180603132655-2972be24d48e", + ) + go_repository( + name = "com_github_chzyer_test", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/chzyer/test", + sum = "h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=", + version = "v0.0.0-20180213035817-a1ea475d72b1", + ) + go_repository( + name = "com_github_envoyproxy_go_control_plane", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/envoyproxy/go-control-plane", + sum = "h1:4cmBvAEBNJaGARUEs3/suWRyfyBfhf7I60WBZq+bv2w=", + version = "v0.9.1-0.20191026205805-5f8ba28d4473", + ) + go_repository( + name = "com_github_envoyproxy_protoc_gen_validate", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/envoyproxy/protoc-gen-validate", + sum = "h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A=", + version = "v0.1.0", + ) + go_repository( + name = "com_github_go_gl_glfw_v3_3_glfw", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-gl/glfw/v3.3/glfw", + sum = "h1:b+9H1GAsx5RsjvDFLoS5zkNBzIQMuVKUYQDmxU3N5XE=", + version = "v0.0.0-20191125211704-12ad95a8df72", + ) + go_repository( + name = "com_github_ianlancetaylor_demangle", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ianlancetaylor/demangle", + sum = "h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=", + version = "v0.0.0-20181102032728-5e5cf60278f6", + ) + go_repository( + name = "com_github_nytimes_gziphandler", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/NYTimes/gziphandler", + sum = "h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=", + version = "v1.1.1", + ) + go_repository( + name = "com_google_cloud_go_bigquery", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "cloud.google.com/go/bigquery", + sum = "h1:hL+ycaJpVE9M7nLoiXb/Pn10ENE2u+oddxbD8uu0ZVU=", + version = "v1.0.1", + ) + go_repository( + name = "com_google_cloud_go_datastore", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "cloud.google.com/go/datastore", + sum = "h1:Kt+gOPPp2LEPWp8CSfxhsM8ik9CcyE/gYu+0r+RnZvM=", + version = "v1.0.0", + ) + go_repository( + name = "com_google_cloud_go_pubsub", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "cloud.google.com/go/pubsub", + sum = "h1:W9tAK3E57P75u0XLLR82LZyw8VpAnhmyTOxW9qzmyj8=", + version = "v1.0.1", + ) + go_repository( + name = "com_google_cloud_go_storage", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "cloud.google.com/go/storage", + sum = "h1:VV2nUM3wwLLGh9lSABFgZMjInyUbJeaRSE64WuAIQ+4=", + version = "v1.0.0", + ) + go_repository( + name = "com_shuralyov_dmitri_gpu_mtl", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "dmitri.shuralyov.com/gpu/mtl", + sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=", + version = "v0.0.0-20190408044501-666a987793e9", + ) + go_repository( + name = "io_rsc_binaryregexp", + build_file_generation = "on", + build_file_proto_mode = 
"disable", + importpath = "rsc.io/binaryregexp", + sum = "h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=", + version = "v0.2.0", + ) + go_repository( + name = "org_golang_x_image", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/image", + sum = "h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=", + version = "v0.0.0-20190802002840-cff245a6509b", + ) + go_repository( + name = "org_golang_x_mobile", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "golang.org/x/mobile", + sum = "h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs=", + version = "v0.0.0-20190719004257-d2bd2a29d028", + ) + go_repository( + name = "com_github_bombsimon_wsl_v3", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/bombsimon/wsl/v3", + sum = "h1:w9f49xQatuaeTJFaNP4SpiWSR5vfT6IstPtM62JjcqA=", + version = "v3.0.0", + ) + go_repository( + name = "com_github_go_xmlfmt_xmlfmt", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/go-xmlfmt/xmlfmt", + sum = "h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo=", + version = "v0.0.0-20191208150333-d5b6f63a941b", + ) + go_repository( + name = "com_github_maratori_testpackage", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/maratori/testpackage", + sum = "h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ=", + version = "v1.0.1", + ) + go_repository( + name = "com_github_nakabonne_nestif", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/nakabonne/nestif", + sum = "h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw=", + version = "v0.3.0", + ) + go_repository( + name = "com_github_niemeyer_pretty", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/niemeyer/pretty", + sum = "h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=", + version = "v0.0.0-20200227124842-a10e7caefd8e", + ) + go_repository( + name = "com_github_phayes_checkstyle", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/phayes/checkstyle", + sum = "h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA=", + version = "v0.0.0-20170904204023-bfd46e6a821d", + ) + go_repository( + name = "com_github_ryancurrah_gomodguard", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/ryancurrah/gomodguard", + sum = "h1:vumZpZardqQ9EfFIZDNEpKaMxfqqEBMhu0uSRcDO5x4=", + version = "v1.0.2", + ) + go_repository( + name = "com_github_tetafro_godot", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/tetafro/godot", + sum = "h1:7+EYJM/Z4gYZhBFdRrVm6JTj5ZLw/QI1j4RfEOXJviE=", + version = "v0.2.5", + ) + go_repository( + name = "com_github_yuin_goldmark", + build_file_generation = "on", + build_file_proto_mode = "disable", + importpath = "github.com/yuin/goldmark", + sum = "h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo=", + version = "v1.1.27", + ) diff --git a/tools/CROSSTOOL b/tools/CROSSTOOL new file mode 100755 index 0000000000..84a2c5c724 --- /dev/null +++ b/tools/CROSSTOOL @@ -0,0 +1,512 @@ +# Copyright YEAR The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# DO NOT EDIT +# This file contains the text format encoding of a +# com.google.devtools.build.lib.view.config.crosstool.CrosstoolRelease +# protocol buffer generated by generate_crosstool. + +major_version: "local" +minor_version: "" +toolchain: < + toolchain_identifier: "host" + host_system_name: "host" + target_system_name: "host" + target_cpu: "k8" + target_libc: "k8-local" + compiler: "gcc" + abi_version: "k8-local" + abi_libc_version: "k8-local" + tool_path: < + name: "ar" + path: "/usr/bin/ar" + > + tool_path: < + name: "ld" + path: "/usr/bin/ld" + > + tool_path: < + name: "cpp" + path: "/usr/bin/cpp" + > + tool_path: < + name: "dwp" + path: "/usr/bin/dwp" + > + tool_path: < + name: "gcc" + path: "/usr/bin/gcc" + > + tool_path: < + name: "gcov" + path: "/usr/bin/gcov" + > + tool_path: < + name: "ld" + path: "/usr/bin/ld" + > + tool_path: < + name: "nm" + path: "/usr/bin/nm" + > + tool_path: < + name: "objcopy" + path: "/usr/bin/objcopy" + > + tool_path: < + name: "objdump" + path: "/usr/bin/objdump" + > + tool_path: < + name: "strip" + path: "/usr/bin/strip" + > + supports_gold_linker: false + supports_start_end_lib: false + supports_interface_shared_objects: false + supports_incremental_linker: false + supports_normalizing_ar: false + supports_fission: false + needsPic: true + compiler_flag: "-U_FORTIFY_SOURCE" + compiler_flag: "-D_FORTIFY_SOURCE=1" + compiler_flag: "-fstack-protector" + compiler_flag: "-Wall" + compiler_flag: "-Wunused-but-set-parameter" + compiler_flag: "-Wno-free-nonheap-object" + compiler_flag: "-fno-omit-frame-pointer" + unfiltered_cxx_flag: "-no-canonical-prefixes" + unfiltered_cxx_flag: "-fno-canonical-system-headers" + unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" + unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" + linker_flag: "-Wl,-z,relro,-z,now" + linker_flag: "-no-canonical-prefixes" + linker_flag: "-pass-exit-codes" + objcopy_embed_flag: "-I" + objcopy_embed_flag: "binary" + compilation_mode_flags: < + mode: DBG + compiler_flag: "-g" + > + compilation_mode_flags: < + mode: OPT + compiler_flag: "-g0" + compiler_flag: "-O2" + compiler_flag: "-DNDEBUG" + compiler_flag: "-ffunction-sections" + compiler_flag: "-fdata-sections" + linker_flag: "-Wl,--gc-sections" + > + linking_mode_flags: < + mode: DYNAMIC + > + cxx_builtin_include_directory: "/usr/lib/gcc" + cxx_builtin_include_directory: "/usr/local/include" + cxx_builtin_include_directory: "/usr/include" + builtin_sysroot: "" +> +toolchain: < + toolchain_identifier: "cross-arm-linux-gnueabihf" + host_system_name: "host" + target_system_name: "cross-arm-linux-gnueabihf" + target_cpu: "arm" + target_libc: "arm-linux-gnueabihf" + compiler: "gcc" + abi_version: "arm-linux-gnueabihf" + abi_libc_version: "arm-linux-gnueabihf" + tool_path: < + name: "ar" + path: "/usr/bin/arm-linux-gnueabihf-ar" + > + tool_path: < + name: "ld" + path: "/usr/bin/arm-linux-gnueabihf-ld" + > + tool_path: < + name: "cpp" + path: "/usr/bin/arm-linux-gnueabihf-cpp" + > + tool_path: < + name: "dwp" + path: 
"/usr/bin/arm-linux-gnueabihf-dwp" + > + tool_path: < + name: "gcc" + path: "/usr/bin/arm-linux-gnueabihf-gcc" + > + tool_path: < + name: "gcov" + path: "/usr/bin/arm-linux-gnueabihf-gcov" + > + tool_path: < + name: "ld" + path: "/usr/bin/arm-linux-gnueabihf-ld" + > + tool_path: < + name: "nm" + path: "/usr/bin/arm-linux-gnueabihf-nm" + > + tool_path: < + name: "objcopy" + path: "/usr/bin/arm-linux-gnueabihf-objcopy" + > + tool_path: < + name: "objdump" + path: "/usr/bin/arm-linux-gnueabihf-objdump" + > + tool_path: < + name: "strip" + path: "/usr/bin/arm-linux-gnueabihf-strip" + > + supports_gold_linker: false + supports_start_end_lib: false + supports_interface_shared_objects: false + supports_incremental_linker: false + supports_normalizing_ar: false + supports_fission: false + needsPic: true + compiler_flag: "-U_FORTIFY_SOURCE" + compiler_flag: "-D_FORTIFY_SOURCE=1" + compiler_flag: "-fstack-protector" + compiler_flag: "-Wall" + compiler_flag: "-Wunused-but-set-parameter" + compiler_flag: "-Wno-free-nonheap-object" + compiler_flag: "-fno-omit-frame-pointer" + unfiltered_cxx_flag: "-no-canonical-prefixes" + unfiltered_cxx_flag: "-fno-canonical-system-headers" + unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" + unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" + linker_flag: "-Wl,-z,relro,-z,now" + linker_flag: "-no-canonical-prefixes" + linker_flag: "-pass-exit-codes" + objcopy_embed_flag: "-I" + objcopy_embed_flag: "binary" + compilation_mode_flags: < + mode: DBG + compiler_flag: "-g" + > + compilation_mode_flags: < + mode: OPT + compiler_flag: "-g0" + compiler_flag: "-O2" + compiler_flag: "-DNDEBUG" + compiler_flag: "-ffunction-sections" + compiler_flag: "-fdata-sections" + linker_flag: "-Wl,--gc-sections" + > + linking_mode_flags: < + mode: DYNAMIC + > + cxx_builtin_include_directory: "/usr/arm-linux-gnueabihf/include" + cxx_builtin_include_directory: "/usr/lib/gcc-cross/arm-linux-gnueabihf" + builtin_sysroot: "" +> +toolchain: < + toolchain_identifier: "cross-aarch64-linux-gnu" + host_system_name: "host" + target_system_name: "cross-aarch64-linux-gnu" + target_cpu: "aarch64" + target_libc: "aarch64-linux-gnu" + compiler: "gcc" + abi_version: "aarch64-linux-gnu" + abi_libc_version: "aarch64-linux-gnu" + tool_path: < + name: "ar" + path: "/usr/bin/aarch64-linux-gnu-ar" + > + tool_path: < + name: "ld" + path: "/usr/bin/aarch64-linux-gnu-ld" + > + tool_path: < + name: "cpp" + path: "/usr/bin/aarch64-linux-gnu-cpp" + > + tool_path: < + name: "dwp" + path: "/usr/bin/aarch64-linux-gnu-dwp" + > + tool_path: < + name: "gcc" + path: "/usr/bin/aarch64-linux-gnu-gcc" + > + tool_path: < + name: "gcov" + path: "/usr/bin/aarch64-linux-gnu-gcov" + > + tool_path: < + name: "ld" + path: "/usr/bin/aarch64-linux-gnu-ld" + > + tool_path: < + name: "nm" + path: "/usr/bin/aarch64-linux-gnu-nm" + > + tool_path: < + name: "objcopy" + path: "/usr/bin/aarch64-linux-gnu-objcopy" + > + tool_path: < + name: "objdump" + path: "/usr/bin/aarch64-linux-gnu-objdump" + > + tool_path: < + name: "strip" + path: "/usr/bin/aarch64-linux-gnu-strip" + > + supports_gold_linker: false + supports_start_end_lib: false + supports_interface_shared_objects: false + supports_incremental_linker: false + supports_normalizing_ar: false + supports_fission: false + needsPic: true + compiler_flag: "-U_FORTIFY_SOURCE" + compiler_flag: "-D_FORTIFY_SOURCE=1" + compiler_flag: "-fstack-protector" + compiler_flag: "-Wall" + compiler_flag: 
"-Wunused-but-set-parameter" + compiler_flag: "-Wno-free-nonheap-object" + compiler_flag: "-fno-omit-frame-pointer" + unfiltered_cxx_flag: "-no-canonical-prefixes" + unfiltered_cxx_flag: "-fno-canonical-system-headers" + unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" + unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" + linker_flag: "-Wl,-z,relro,-z,now" + linker_flag: "-no-canonical-prefixes" + linker_flag: "-pass-exit-codes" + objcopy_embed_flag: "-I" + objcopy_embed_flag: "binary" + compilation_mode_flags: < + mode: DBG + compiler_flag: "-g" + > + compilation_mode_flags: < + mode: OPT + compiler_flag: "-g0" + compiler_flag: "-O2" + compiler_flag: "-DNDEBUG" + compiler_flag: "-ffunction-sections" + compiler_flag: "-fdata-sections" + linker_flag: "-Wl,--gc-sections" + > + linking_mode_flags: < + mode: DYNAMIC + > + cxx_builtin_include_directory: "/usr/aarch64-linux-gnu/include" + cxx_builtin_include_directory: "/usr/lib/gcc-cross/aarch64-linux-gnu" + builtin_sysroot: "" +> +toolchain: < + toolchain_identifier: "cross-powerpc64le-linux-gnu" + host_system_name: "host" + target_system_name: "cross-powerpc64le-linux-gnu" + target_cpu: "powerpc64le" + target_libc: "powerpc64le-linux-gnu" + compiler: "gcc" + abi_version: "powerpc64le-linux-gnu" + abi_libc_version: "powerpc64le-linux-gnu" + tool_path: < + name: "ar" + path: "/usr/bin/powerpc64le-linux-gnu-ar" + > + tool_path: < + name: "ld" + path: "/usr/bin/powerpc64le-linux-gnu-ld" + > + tool_path: < + name: "cpp" + path: "/usr/bin/powerpc64le-linux-gnu-cpp" + > + tool_path: < + name: "dwp" + path: "/usr/bin/powerpc64le-linux-gnu-dwp" + > + tool_path: < + name: "gcc" + path: "/usr/bin/powerpc64le-linux-gnu-gcc" + > + tool_path: < + name: "gcov" + path: "/usr/bin/powerpc64le-linux-gnu-gcov" + > + tool_path: < + name: "ld" + path: "/usr/bin/powerpc64le-linux-gnu-ld" + > + tool_path: < + name: "nm" + path: "/usr/bin/powerpc64le-linux-gnu-nm" + > + tool_path: < + name: "objcopy" + path: "/usr/bin/powerpc64le-linux-gnu-objcopy" + > + tool_path: < + name: "objdump" + path: "/usr/bin/powerpc64le-linux-gnu-objdump" + > + tool_path: < + name: "strip" + path: "/usr/bin/powerpc64le-linux-gnu-strip" + > + supports_gold_linker: false + supports_start_end_lib: false + supports_interface_shared_objects: false + supports_incremental_linker: false + supports_normalizing_ar: false + supports_fission: false + needsPic: true + compiler_flag: "-U_FORTIFY_SOURCE" + compiler_flag: "-D_FORTIFY_SOURCE=1" + compiler_flag: "-fstack-protector" + compiler_flag: "-Wall" + compiler_flag: "-Wunused-but-set-parameter" + compiler_flag: "-Wno-free-nonheap-object" + compiler_flag: "-fno-omit-frame-pointer" + unfiltered_cxx_flag: "-no-canonical-prefixes" + unfiltered_cxx_flag: "-fno-canonical-system-headers" + unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" + unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" + linker_flag: "-Wl,-z,relro,-z,now" + linker_flag: "-no-canonical-prefixes" + linker_flag: "-pass-exit-codes" + objcopy_embed_flag: "-I" + objcopy_embed_flag: "binary" + compilation_mode_flags: < + mode: DBG + compiler_flag: "-g" + > + compilation_mode_flags: < + mode: OPT + compiler_flag: "-g0" + compiler_flag: "-O2" + compiler_flag: "-DNDEBUG" + compiler_flag: "-ffunction-sections" + compiler_flag: "-fdata-sections" + linker_flag: "-Wl,--gc-sections" + > + 
linking_mode_flags: < + mode: DYNAMIC + > + cxx_builtin_include_directory: "/usr/powerpc64le-linux-gnu/include" + cxx_builtin_include_directory: "/usr/lib/gcc-cross/powerpc64le-linux-gnu" + builtin_sysroot: "" +> +toolchain: < + toolchain_identifier: "cross-s390x-linux-gnu" + host_system_name: "host" + target_system_name: "cross-s390x-linux-gnu" + target_cpu: "s390x" + target_libc: "s390x-linux-gnu" + compiler: "gcc" + abi_version: "s390x-linux-gnu" + abi_libc_version: "s390x-linux-gnu" + tool_path: < + name: "ar" + path: "/usr/bin/s390x-linux-gnu-ar" + > + tool_path: < + name: "ld" + path: "/usr/bin/s390x-linux-gnu-ld" + > + tool_path: < + name: "cpp" + path: "/usr/bin/s390x-linux-gnu-cpp" + > + tool_path: < + name: "dwp" + path: "/usr/bin/s390x-linux-gnu-dwp" + > + tool_path: < + name: "gcc" + path: "/usr/bin/s390x-linux-gnu-gcc" + > + tool_path: < + name: "gcov" + path: "/usr/bin/s390x-linux-gnu-gcov" + > + tool_path: < + name: "ld" + path: "/usr/bin/s390x-linux-gnu-ld" + > + tool_path: < + name: "nm" + path: "/usr/bin/s390x-linux-gnu-nm" + > + tool_path: < + name: "objcopy" + path: "/usr/bin/s390x-linux-gnu-objcopy" + > + tool_path: < + name: "objdump" + path: "/usr/bin/s390x-linux-gnu-objdump" + > + tool_path: < + name: "strip" + path: "/usr/bin/s390x-linux-gnu-strip" + > + supports_gold_linker: false + supports_start_end_lib: false + supports_interface_shared_objects: false + supports_incremental_linker: false + supports_normalizing_ar: false + supports_fission: false + needsPic: true + compiler_flag: "-U_FORTIFY_SOURCE" + compiler_flag: "-D_FORTIFY_SOURCE=1" + compiler_flag: "-fstack-protector" + compiler_flag: "-Wall" + compiler_flag: "-Wunused-but-set-parameter" + compiler_flag: "-Wno-free-nonheap-object" + compiler_flag: "-fno-omit-frame-pointer" + unfiltered_cxx_flag: "-no-canonical-prefixes" + unfiltered_cxx_flag: "-fno-canonical-system-headers" + unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" + unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" + linker_flag: "-Wl,-z,relro,-z,now" + linker_flag: "-no-canonical-prefixes" + linker_flag: "-pass-exit-codes" + objcopy_embed_flag: "-I" + objcopy_embed_flag: "binary" + compilation_mode_flags: < + mode: DBG + compiler_flag: "-g" + > + compilation_mode_flags: < + mode: OPT + compiler_flag: "-g0" + compiler_flag: "-O2" + compiler_flag: "-DNDEBUG" + compiler_flag: "-ffunction-sections" + compiler_flag: "-fdata-sections" + linker_flag: "-Wl,--gc-sections" + > + linking_mode_flags: < + mode: DYNAMIC + > + cxx_builtin_include_directory: "/usr/s390x-linux-gnu/include" + cxx_builtin_include_directory: "/usr/lib/gcc-cross/s390x-linux-gnu" + builtin_sysroot: "" +> diff --git a/tools/build_tar/BUILD.bazel b/tools/build_tar/BUILD.bazel new file mode 100644 index 0000000000..7f081a7f4a --- /dev/null +++ b/tools/build_tar/BUILD.bazel @@ -0,0 +1,32 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = ["buildtar.go"], + importpath = "k8s.io/repo-infra/tools/build_tar", + visibility = ["//visibility:private"], + deps = [ + "@io_k8s_klog_v2//:go_default_library", + "@org_golang_x_build//pargzip:go_default_library", + ], +) + +go_binary( + name = "build_tar", + embed = [":go_default_library"], + visibility = ["//visibility:public"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + 
name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/tools/build_tar/buildtar.go b/tools/build_tar/buildtar.go new file mode 100644 index 0000000000..d53126b78c --- /dev/null +++ b/tools/build_tar/buildtar.go @@ -0,0 +1,611 @@ +/* +Copyright 2017 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// fast tar builder for bazel +package main + +import ( + "archive/tar" + "bufio" + "compress/bzip2" + "compress/gzip" + "flag" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + "golang.org/x/build/pargzip" + + "k8s.io/klog/v2" +) + +func main() { + var ( + flagfile string + + output string + directory string + compression string + + files multiString + tars multiString + debs multiString + links multiString + + mode string + modes multiString + + owner string + owners multiString + ownerName string + ownerNames multiString + + mtime string + ) + + flag.StringVar(&flagfile, "flagfile", "", "Path to flagfile") + + flag.StringVar(&output, "output", "", "The output file, mandatory") + flag.StringVar(&directory, "directory", "", "Directory in which to store the file inside the layer") + flag.StringVar(&compression, "compression", "", "Compression (`gz` or `bz2`), default is none.") + + flag.Var(&files, "file", "A file to add to the layer") + flag.Var(&tars, "tar", "A tar file to add to the layer") + flag.Var(&debs, "deb", "A debian package to add to the layer") + flag.Var(&links, "link", "Add a symlink a inside the layer ponting to b if a:b is specified") + + flag.StringVar(&mode, "mode", "", "Force the mode on the added files (in octal).") + flag.Var(&modes, "modes", "Specific mode to apply to specific file (from the file argument), e.g., path/to/file=0455.") + + flag.StringVar(&owner, "owner", "0.0", "Specify the numeric default owner of all files, e.g., 0.0") + flag.Var(&owners, "owners", "Specify the numeric owners of individual files, e.g. path/to/file=0.0.") + flag.StringVar(&ownerName, "owner_name", "", "Specify the owner name of all files, e.g. root.root.") + flag.Var(&ownerNames, "owner_names", "Specify the owner names of individual files, e.g. path/to/file=root.root.") + + flag.StringVar(&mtime, "mtime", "", + "mtime to set on tar file entries. 
May be an integer (corresponding to epoch seconds) or the value \"portable\", which will use the value 2000-01-01, usable with non *nix OSes") + + flag.Set("logtostderr", "true") + + flag.Parse() + + if flagfile != "" { + b, err := ioutil.ReadFile(flagfile) + if err != nil { + klog.Fatalf("couldn't read flagfile: %v", err) + } + cmdline := strings.Split(string(b), "\n") + flag.CommandLine.Parse(cmdline) + } + + if output == "" { + klog.Fatalf("--output flag is required") + } + + parsedMtime, err := parseMtimeFlag(mtime) + if err != nil { + klog.Fatalf("invalid value for --mtime: %s", mtime) + } + + meta := newFileMeta(mode, modes, owner, owners, ownerName, ownerNames, parsedMtime) + + tf, err := newTarFile(output, directory, compression, meta) + if err != nil { + klog.Fatalf("couldn't build tar: %v", err) + } + defer tf.Close() + + for _, file := range files { + parts := strings.SplitN(file, "=", 2) + if len(parts) != 2 { + klog.Fatalf("bad parts length for file %q", file) + } + if err := tf.addFile(parts[0], parts[1]); err != nil { + klog.Fatalf("couldn't add file: %v", err) + } + } + + for _, tar := range tars { + if err := tf.addTar(tar); err != nil { + klog.Fatalf("couldn't add tar: %v", err) + } + } + + for _, deb := range debs { + if err := tf.addDeb(deb); err != nil { + klog.Fatalf("couldn't add deb: %v", err) + } + } + + for _, link := range links { + parts := strings.SplitN(link, ":", 2) + if len(parts) != 2 { + klog.Fatalf("bad parts length for link %q", link) + } + if err := tf.addLink(parts[0], parts[1]); err != nil { + klog.Fatalf("couldn't add link: %v", err) + } + } +} + +type tarFile struct { + directory string + + tw *tar.Writer + + meta fileMeta + dirsMade map[string]struct{} + filesMade map[string]struct{} + + closers []func() +} + +func newTarFile(output, directory, compression string, meta fileMeta) (*tarFile, error) { + var ( + w io.Writer + closers []func() + ) + f, err := os.Create(output) + if err != nil { + return nil, err + } + closers = append(closers, func() { + f.Close() + }) + w = f + + buf := bufio.NewWriter(w) + closers = append(closers, func() { buf.Flush() }) + w = buf + + switch compression { + case "": + case "gz": + gzw := pargzip.NewWriter(w) + closers = append(closers, func() { gzw.Close() }) + w = gzw + case "bz2", "xz": + return nil, fmt.Errorf("%q compression is not supported yet", compression) + default: + return nil, fmt.Errorf("unknown compression %q", compression) + } + + tw := tar.NewWriter(w) + closers = append(closers, func() { tw.Close() }) + + return &tarFile{ + directory: directory, + tw: tw, + closers: closers, + meta: meta, + dirsMade: map[string]struct{}{}, + filesMade: map[string]struct{}{}, + }, nil +} + +func (f *tarFile) addFile(file, dest string) error { + dest = strings.TrimLeft(dest, "/") + dest = filepath.Clean(dest) + + uid := f.meta.getUID(dest) + gid := f.meta.getGID(dest) + uname := f.meta.getUname(dest) + gname := f.meta.getGname(dest) + + dest = filepath.Join(strings.TrimLeft(f.directory, "/"), dest) + dest = filepath.Clean(dest) + + if ok := f.tryReservePath(dest); !ok { + klog.Warningf("Duplicate file in archive: %v, picking first occurence", dest) + return nil + } + + info, err := os.Stat(file) + if err != nil { + return err + } + + mode := f.meta.getMode(dest) + // If mode is unspecified, derive the mode from the file's mode. 
+ if mode == 0 { + mode = os.FileMode(0644) + if info.Mode().Perm()&os.FileMode(0111) != 0 { + mode = os.FileMode(0755) + } + } + + header := tar.Header{ + Name: dest, + Mode: int64(mode), + Uid: uid, + Gid: gid, + Size: 0, + Uname: uname, + Gname: gname, + ModTime: f.meta.modTime, + } + + if err := f.makeDirs(header); err != nil { + return err + } + + switch { + case info.Mode()&os.ModeSymlink != 0: + return fmt.Errorf("addFile: didn't expect symlink: %s", file) + case info.Mode()&os.ModeNamedPipe != 0: + return fmt.Errorf("addFile: didn't expect named pipe: %s", file) + case info.Mode()&os.ModeSocket != 0: + return fmt.Errorf("addFile: didn't expect socket: %s", file) + case info.Mode()&os.ModeDevice != 0: + return fmt.Errorf("addFile: didn't expect device: %s", file) + case info.Mode()&os.ModeDir != 0: + header.Typeflag = tar.TypeDir + if err := f.tw.WriteHeader(&header); err != nil { + return err + } + default: + //regular file + header.Typeflag = tar.TypeReg + b, err := ioutil.ReadFile(file) + if err != nil { + return err + } + header.Size = int64(len(b)) + if err := f.tw.WriteHeader(&header); err != nil { + return err + } + if _, err := f.tw.Write(b); err != nil { + return err + } + } + return nil +} + +func (f *tarFile) addLink(symlink, target string) error { + if ok := f.tryReservePath(symlink); !ok { + klog.Warningf("Duplicate file in archive: %v, picking first occurence", symlink) + return nil + } + header := tar.Header{ + Name: symlink, + Typeflag: tar.TypeSymlink, + Linkname: target, + Mode: int64(0777), // symlinks should always have 0777 mode + ModTime: f.meta.modTime, + } + if err := f.makeDirs(header); err != nil { + return err + } + return f.tw.WriteHeader(&header) +} + +func (f *tarFile) addTar(toAdd string) error { + root := "" + if f.directory != "/" { + root = f.directory + } + + var r io.Reader + + file, err := os.Open(toAdd) + if err != nil { + return err + } + defer file.Close() + r = file + + r = bufio.NewReader(r) + + switch { + case strings.HasSuffix(toAdd, "gz"): + gzr, err := gzip.NewReader(r) + if err != nil { + return err + } + r = gzr + case strings.HasSuffix(toAdd, "bz2"): + bz2r := bzip2.NewReader(r) + r = bz2r + case strings.HasSuffix(toAdd, "xz"): + return fmt.Errorf("%q decompression is not supported yet", toAdd) + default: + } + + tr := tar.NewReader(r) + + for { + header, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return err + } + header.Name = filepath.Join(root, header.Name) + if header.Typeflag == tar.TypeDir && !strings.HasSuffix(header.Name, "/") { + header.Name = header.Name + "/" + } else if ok := f.tryReservePath(header.Name); !ok { + klog.Warningf("Duplicate file in archive: %v, picking first occurence", header.Name) + continue + } + // Create root directories with same permissions if missing. + // makeDirs keeps track of which directories exist, + // so it's safe to duplicate this here. + if err = f.makeDirs(*header); err != nil { + return err + } + // If this is a directory, then makeDirs already created it, + // so skip to the next entry. + if header.Typeflag == tar.TypeDir { + continue + } + err = f.tw.WriteHeader(header) + if err != nil { + return err + } + if _, err = io.Copy(f.tw, tr); err != nil { + return err + } + } + return nil +} + +func (f *tarFile) addDeb(toAdd string) error { + return fmt.Errorf("addDeb unimplemented") +} + +func (f *tarFile) makeDirs(header tar.Header) error { + dirToMake := []string{} + dir := header.Name + for { + dir = filepath.Dir(dir) + if dir == "." 
|| dir == "/" { + break + } + dirToMake = append(dirToMake, dir) + } + for i := len(dirToMake) - 1; i >= 0; i-- { + dir := dirToMake[i] + if _, ok := f.dirsMade[dir]; ok { + continue + } + dh := header + // Add the x bit to directories if the read bit is set, + // and make sure all directories are at least user RWX. + dh.Mode = header.Mode | 0700 | ((0444 & header.Mode) >> 2) + dh.Typeflag = tar.TypeDir + dh.Name = dir + "/" + if err := f.tw.WriteHeader(&dh); err != nil { + return err + } + + f.dirsMade[dir] = struct{}{} + } + return nil +} + +func (f *tarFile) tryReservePath(path string) bool { + if _, ok := f.filesMade[path]; ok { + return false + } + if _, ok := f.dirsMade[path]; ok { + return false + } + f.filesMade[path] = struct{}{} + return true +} + +func (f *tarFile) Close() { + for i := len(f.closers) - 1; i >= 0; i-- { + f.closers[i]() + } +} + +// parseMtimeFlag matches the functionality of Bazel's python-based build_tar and archive modules +// for the --mtime flag. +// In particular: +// - if no value is provided, use the Unix epoch +// - if the string "portable" is provided, use a "deterministic date compatible with non *nix OSes" +// - if an integer is provided, interpret that as the number of seconds since Unix epoch +func parseMtimeFlag(input string) (time.Time, error) { + if input == "" { + return time.Unix(0, 0), nil + } else if input == "portable" { + // A deterministic time compatible with non *nix OSes. + // See also https://github.com/bazelbuild/bazel/issues/1299. + return time.Date(2000, 1, 1, 0, 0, 0, 0, time.UTC), nil + } + seconds, err := strconv.ParseInt(input, 10, 64) + if err != nil { + return time.Unix(0, 0), err + } + return time.Unix(seconds, 0), nil +} + +func newFileMeta( + mode string, + modes multiString, + owner string, + owners multiString, + ownerName string, + ownerNames multiString, + modTime time.Time, +) fileMeta { + meta := fileMeta{ + modTime: modTime, + } + + if mode != "" { + i, err := strconv.ParseUint(mode, 8, 32) + if err != nil { + klog.Fatalf("couldn't parse mode: %v", mode) + } + meta.defaultMode = os.FileMode(i) + } + + meta.modeMap = map[string]os.FileMode{} + for _, filemode := range modes { + parts := strings.SplitN(filemode, "=", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q", filemode) + } + if parts[0][0] == '/' { + parts[0] = parts[0][1:] + } + i, err := strconv.ParseUint(parts[1], 8, 32) + if err != nil { + klog.Fatalf("couldn't parse mode: %v", filemode) + } + meta.modeMap[parts[0]] = os.FileMode(i) + } + + if ownerName != "" { + parts := strings.SplitN(ownerName, ".", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q", ownerName) + } + meta.defaultUname = parts[0] + meta.defaultGname = parts[1] + } + + meta.unameMap = map[string]string{} + meta.gnameMap = map[string]string{} + for _, name := range ownerNames { + parts := strings.SplitN(name, "=", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q %v", name, parts) + } + filename, ownername := parts[0], parts[1] + + parts = strings.SplitN(ownername, ".", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q", name) + } + uname, gname := parts[0], parts[1] + + meta.unameMap[filename] = uname + meta.gnameMap[filename] = gname + } + + if owner != "" { + parts := strings.SplitN(owner, ".", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q", owner) + } + uid, err := strconv.Atoi(parts[0]) + if err != nil { + klog.Fatalf("could not parse uid: %q", parts[0]) + } + gid, err := strconv.Atoi(parts[1]) + if 
err != nil { + klog.Fatalf("could not parse gid: %q", parts[1]) + } + meta.defaultUID = uid + meta.defaultGID = gid + + } + + meta.uidMap = map[string]int{} + meta.gidMap = map[string]int{} + for _, owner := range owners { + parts := strings.SplitN(owner, "=", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q", owner) + } + filename, owner := parts[0], parts[1] + + parts = strings.SplitN(parts[1], ".", 2) + if len(parts) != 2 { + klog.Fatalf("expected two parts to %q", owner) + } + uid, err := strconv.Atoi(parts[0]) + if err != nil { + klog.Fatalf("could not parse uid: %q", parts[0]) + } + gid, err := strconv.Atoi(parts[1]) + if err != nil { + klog.Fatalf("could not parse gid: %q", parts[1]) + } + meta.uidMap[filename] = uid + meta.gidMap[filename] = gid + } + + return meta +} + +type fileMeta struct { + defaultGID, defaultUID int + gidMap, uidMap map[string]int + + defaultGname, defaultUname string + gnameMap, unameMap map[string]string + + defaultMode os.FileMode + modeMap map[string]os.FileMode + + modTime time.Time +} + +func (f *fileMeta) getGID(fname string) int { + if id, ok := f.gidMap[fname]; ok { + return id + } + return f.defaultGID +} + +func (f *fileMeta) getUID(fname string) int { + if id, ok := f.uidMap[fname]; ok { + return id + } + return f.defaultUID +} + +func (f *fileMeta) getGname(fname string) string { + if name, ok := f.gnameMap[fname]; ok { + return name + } + return f.defaultGname +} + +func (f *fileMeta) getUname(fname string) string { + if name, ok := f.unameMap[fname]; ok { + return name + } + return f.defaultUname +} + +func (f *fileMeta) getMode(fname string) os.FileMode { + if mode, ok := f.modeMap[fname]; ok { + return mode + } + return f.defaultMode +} + +type multiString []string + +func (ms *multiString) String() string { + return strings.Join(*ms, ",") +} + +func (ms *multiString) Set(v string) error { + *ms = append(*ms, v) + return nil +} diff --git a/verify/BUILD.bazel b/verify/BUILD.bazel new file mode 100644 index 0000000000..841a623193 --- /dev/null +++ b/verify/BUILD.bazel @@ -0,0 +1,16 @@ +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [ + ":package-srcs", + "//verify/boilerplate:all-srcs", + ], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/verify/README.md b/verify/README.md new file mode 100644 index 0000000000..6eca4ece1b --- /dev/null +++ b/verify/README.md @@ -0,0 +1,52 @@ +# Verification scripts + +Collection of scripts that verifies that a project meets requirements set for kubernetes related projects. The scripts are to be invoked depending on the needs via CI tooling, such as Travis CI. See main Readme file on how to integrate the repo-infra in your project. + +The scripts are currently being migrated from the main kubernetes repository. If your project requires additional set of verifications, consider creating an issue/PR on repo-infra to avoid code duplication across multiple projects. 
+ +If repo-infra is integrated at the root of your project as git submodule at path: `/repo-infra`, +then scripts can be invoked as `repo-infra/verify/verify-*.sh` + +travis.yaml example: + +``` +dist: trusty + +os: +- linux + +language: go + +go: +- 1.8 + +before_install: +- go get -u github.com/alecthomas/gometalinter + +install: +- gometalinter --install + +script: +- repo-infra/verify/verify-go-src.sh -v +- repo-infra/verify/verify-boilerplate.sh +# OR with vendoring +# - vendor/github.com/kubernetes/repo-infra/verify-go-src.sh --rootdir=$(pwd) -v +``` + +## Verify boilerplate + +Verifies that the boilerplate for various formats (go files, Makefile, etc.) is included in each file: `verify-boilerplate.sh`. + +## Verify go source code + +Runs a set of scripts on the go source code excluding vendored files: `verify-go-src.sh`. Expects `gometalinter` tooling installed (see travis file above) + +With git submodule from your repo root: `repo-infra/verify/verify-go-src.sh -v` + +With vendoring: `vendor/repo-infra/verify/verify-go-src.sh -v --rootdir $(pwd)` + +Checks include: + +1. gofmt +2. gometalinter +3. govet diff --git a/verify/boilerplate/BUILD.bazel b/verify/boilerplate/BUILD.bazel new file mode 100644 index 0000000000..245e4be71c --- /dev/null +++ b/verify/boilerplate/BUILD.bazel @@ -0,0 +1,29 @@ +# gazelle:exclude test +# gazelle:exclude verify/boilerplate/test +package(default_visibility = ["//visibility:public"]) + +exports_files(glob(["*.txt"])) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) + +filegroup( + name = "templates", + srcs = glob(["boilerplate.*.txt"]), +) + +filegroup( + name = "testdata", + srcs = glob(["test/**"]), +) diff --git a/verify/boilerplate/boilerplate.Dockerfile.txt b/verify/boilerplate/boilerplate.Dockerfile.txt new file mode 100644 index 0000000000..384f325abf --- /dev/null +++ b/verify/boilerplate/boilerplate.Dockerfile.txt @@ -0,0 +1,14 @@ +# Copyright YEAR The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/verify/boilerplate/boilerplate.Makefile.txt b/verify/boilerplate/boilerplate.Makefile.txt new file mode 100644 index 0000000000..384f325abf --- /dev/null +++ b/verify/boilerplate/boilerplate.Makefile.txt @@ -0,0 +1,14 @@ +# Copyright YEAR The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/verify/boilerplate/boilerplate.bzl.txt b/verify/boilerplate/boilerplate.bzl.txt new file mode 100644 index 0000000000..384f325abf --- /dev/null +++ b/verify/boilerplate/boilerplate.bzl.txt @@ -0,0 +1,14 @@ +# Copyright YEAR The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/verify/boilerplate/boilerplate.go.txt b/verify/boilerplate/boilerplate.go.txt new file mode 100644 index 0000000000..59e740c1ee --- /dev/null +++ b/verify/boilerplate/boilerplate.go.txt @@ -0,0 +1,16 @@ +/* +Copyright YEAR The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + diff --git a/verify/boilerplate/boilerplate.py.txt b/verify/boilerplate/boilerplate.py.txt new file mode 100644 index 0000000000..384f325abf --- /dev/null +++ b/verify/boilerplate/boilerplate.py.txt @@ -0,0 +1,14 @@ +# Copyright YEAR The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/verify/boilerplate/boilerplate.sh.txt b/verify/boilerplate/boilerplate.sh.txt new file mode 100644 index 0000000000..384f325abf --- /dev/null +++ b/verify/boilerplate/boilerplate.sh.txt @@ -0,0 +1,14 @@ +# Copyright YEAR The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/verify/boilerplate/test/fail.go b/verify/boilerplate/test/fail.go new file mode 100644 index 0000000000..16159c5ac0 --- /dev/null +++ b/verify/boilerplate/test/fail.go @@ -0,0 +1,19 @@ +/* +Copyright 2014 The Kubernetes Authors. 
+ +fail + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package test diff --git a/verify/boilerplate/test/fail.py b/verify/boilerplate/test/fail.py new file mode 100644 index 0000000000..cbdd06ff8a --- /dev/null +++ b/verify/boilerplate/test/fail.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +# Copyright 2015 The Kubernetes Authors. +# +# failed +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/verify/boilerplate/test/pass.go b/verify/boilerplate/test/pass.go new file mode 100644 index 0000000000..7508448aae --- /dev/null +++ b/verify/boilerplate/test/pass.go @@ -0,0 +1,17 @@ +/* +Copyright 2014 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package test diff --git a/verify/boilerplate/test/pass.py b/verify/boilerplate/test/pass.py new file mode 100644 index 0000000000..5b7ce29a25 --- /dev/null +++ b/verify/boilerplate/test/pass.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +# Copyright 2015 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +True diff --git a/verify/go_install_from_commit.sh b/verify/go_install_from_commit.sh new file mode 100755 index 0000000000..ee6fd0d9c1 --- /dev/null +++ b/verify/go_install_from_commit.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail + +PKG=$1 +COMMIT=$2 +export GOPATH=$3 +export GOBIN="$GOPATH/bin" + +go get -d -u "${PKG}" +cd "${GOPATH}/src/${PKG}" +git checkout -q "${COMMIT}" +go install "${PKG}" diff --git a/verify/verify-bazel.sh b/verify/verify-bazel.sh new file mode 100755 index 0000000000..9929547a38 --- /dev/null +++ b/verify/verify-bazel.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +# Copyright 2016 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO(fejta): delete this file + +set -o errexit +set -o nounset +set -o pipefail +set -o xtrace + +bazel test @io_k8s_repo_infra//hack:verify-deps diff --git a/verify/verify-boilerplate.sh b/verify/verify-boilerplate.sh new file mode 100755 index 0000000000..e9e631e76d --- /dev/null +++ b/verify/verify-boilerplate.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +# Copyright 2014 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO(fejta): delete this file + +set -o errexit +set -o nounset +set -o pipefail +set -o xtrace + +bazel test @io_k8s_repo_infra//hack:verify-boilerplate diff --git a/verify/verify-errexit.sh b/verify/verify-errexit.sh new file mode 100755 index 0000000000..a30f31c48c --- /dev/null +++ b/verify/verify-errexit.sh @@ -0,0 +1,48 @@ +#!/bin/bash +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This script will verify that the specified script files have +# "set -o errexit" turned on at some point. +# +# Usage: verify-errexit.sh [ dir | file ... 
] +# default args is the root of our source tree + +set -o errexit +set -o nounset +set -o pipefail + +REPO_ROOT=$(dirname "${BASH_SOURCE}")/.. + +if [[ "$*" != "" ]]; then + args="$*" +else + args=$(ls "${REPO_ROOT}" | grep -v vendor | grep -v glide) +fi + +# Gather the list of files that appear to be shell scripts. +# Meaning they have some form of "#!...sh" as a line in them. +shFiles=$(grep -rl '^#!.*sh$' ${args}) + +rc="0" +for file in ${shFiles}; do + grep "set -o errexit" ${file} &> /dev/null && continue + grep "set -[a-z]*e" ${file} &> /dev/null && continue + + echo ${file}: appears to be missing \"set -o errexit\" + rc="1" +done + +exit ${rc} + diff --git a/verify/verify-go-src.sh b/verify/verify-go-src.sh new file mode 100755 index 0000000000..cdc82f1640 --- /dev/null +++ b/verify/verify-go-src.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Copyright 2017 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -o nounset +set -o pipefail +set -o xtrace + +bazel test @io_k8s_repo_infra//hack:verify-all From 8055ba6756b160b3869175f9fcbb59f2e7c693ed Mon Sep 17 00:00:00 2001 From: Jimmi Dyson Date: Fri, 3 Jul 2020 11:00:44 +0100 Subject: [PATCH 14/14] chore: Fix up path to repo-infra --- scripts/pre-commit.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/pre-commit.sh b/scripts/pre-commit.sh index 18cbb4d8a4..d019ab1ecb 100755 --- a/scripts/pre-commit.sh +++ b/scripts/pre-commit.sh @@ -124,7 +124,7 @@ echo "Verifying Gofmt" ./hack/go-tools/verify-gofmt.sh echo "Checking boilerplate text" -./vendor/repo-infra/hack/verify_boilerplate.py --rootdir="${ROOT_DIR}" +./vendor/k8s.io/repo-infra/hack/verify_boilerplate.py --rootdir="${ROOT_DIR}" echo "Linting" golangci-lint run --timeout=5m
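
For anyone running the hook outside CI, here is a minimal sketch of invoking the relocated boilerplate check directly. It assumes the repository root as the working directory and that the vendored `k8s.io/repo-infra` copy is present; `ROOT_DIR` below is a stand-in for the value computed in `scripts/pre-commit.sh`, not a variable defined by this patch.

```
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o pipefail

# Stand-in for the ROOT_DIR computed by scripts/pre-commit.sh.
ROOT_DIR="$(git rev-parse --show-toplevel)"

# After the final patch, the boilerplate checker lives under the full
# module path of the vendored repo-infra copy.
"${ROOT_DIR}/vendor/k8s.io/repo-infra/hack/verify_boilerplate.py" --rootdir="${ROOT_DIR}"
```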