diff --git a/.gitignore b/.gitignore index ea1ad714592..b15c5471ced 100644 --- a/.gitignore +++ b/.gitignore @@ -17,5 +17,6 @@ /_meta/kibana.generated/* /fields.yml /apm-server.template-es.json +/vendor/github.com/elastic/beats/libbeat/fields.yml html_docs diff --git a/Makefile b/Makefile index 362d65f674f..e8fe9729a4d 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,6 @@ ES_BEATS?=./_beats BEATS_VERSION?=master NOW=$(shell date -u '+%Y-%m-%dT%H:%M:%S') GOBUILD_FLAGS=-i -ldflags "-s -X $(BEAT_PATH)/vendor/github.com/elastic/beats/libbeat/version.buildTime=$(NOW) -X $(BEAT_PATH)/vendor/github.com/elastic/beats/libbeat/version.commit=$(COMMIT_ID)" -FIELDS_FILE_PATH=model MAGE_IMPORT_PATH=${BEAT_PATH}/vendor/github.com/magefile/mage # Path to the libbeat Makefile @@ -21,11 +20,12 @@ MAGE_IMPORT_PATH=${BEAT_PATH}/vendor/github.com/magefile/mage update-beats: rm -rf vendor/github.com/elastic/beats @govendor fetch github.com/elastic/beats/...@$(BEATS_VERSION) - @govendor fetch github.com/elastic/beats/libbeat/generator/fields@$(BEATS_VERSION) github.com/elastic/beats/libbeat/kibana@$(BEATS_VERSION) github.com/elastic/beats/libbeat/outputs/transport/transptest@$(BEATS_VERSION) + @govendor fetch github.com/elastic/beats/libbeat/generator/fields@$(BEATS_VERSION) github.com/elastic/beats/libbeat/kibana@$(BEATS_VERSION) github.com/elastic/beats/libbeat/outputs/transport/transptest@$(BEATS_VERSION) github.com/elastic/beats/libbeat/scripts/cmd/global_fields@$(BEATS_VERSION) @BEATS_VERSION=$(BEATS_VERSION) script/update_beats.sh @$(MAKE) update @echo --- Use this commit message: Update beats framework to `cat vendor/vendor.json | python -c 'import sys, json; print([p["revision"] for p in json.load(sys.stdin)["package"] if p["path"] == "github.com/elastic/beats/libbeat/beat"][0][:7])'` + .PHONY: is-beats-updated is-beats-updated: python-env @$(PYTHON_ENV)/bin/python ./script/is_beats_updated.py ${BEATS_VERSION} diff --git a/NOTICE.txt b/NOTICE.txt index 670b56fad86..949d8ec82b7 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -292,7 +292,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------- Dependency: github.com/elastic/beats Version: master -Revision: a314e34ea823c39f0838be59de7f299831a8a018 +Revision: 9b268ba852ae93e1080c745a1031c72243f64df0 License type (autodetected): Apache-2.0 ./vendor/github.com/elastic/beats/LICENSE.txt: -------------------------------------------------------------------- @@ -358,7 +358,7 @@ Apache License 2.0 -------------------------------------------------------------------- Dependency: github.com/elastic/go-ucfg -Revision: 9c66f5c432b1d25bdb449a1e588d58b5d0cd7268 +Revision: 581f7b1fe9d84f4c18ef0694d6e0eb944a925dae License type (autodetected): Apache-2.0 ./vendor/github.com/elastic/go-ucfg/LICENSE: -------------------------------------------------------------------- @@ -1630,11 +1630,11 @@ DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------- Dependency: github.com/Shopify/sarama -Revision: 32b4ad5c9537ed14e471779b76713ff65420db39 +Revision: d1575e4abe04acbbe8ac766320585cdf271dd189 License type (autodetected): MIT ./vendor/github.com/Shopify/sarama/LICENSE: -------------------------------------------------------------------- -Copyright (c) 2013 Evan Huus +Copyright (c) 2013 Shopify Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/_beats/dev-tools/jenkins_ci.ps1 b/_beats/dev-tools/jenkins_ci.ps1 index a27381a36e9..39abe9fad17 100755 --- a/_beats/dev-tools/jenkins_ci.ps1 +++ b/_beats/dev-tools/jenkins_ci.ps1 @@ -21,6 +21,14 @@ $env:PATH = "$env:GOPATH\bin;C:\tools\mingw64\bin;$env:PATH" # each run starts from a clean slate. $env:MAGEFILE_CACHE = "$env:WORKSPACE\.magefile" +exec { go install github.com/elastic/beats/vendor/github.com/magefile/mage } +exec { go get -u github.com/jstemmer/go-junit-report } + +echo "Building libbeat fields.yml" +cd libbeat +exec { mage fields } +cd .. + if (Test-Path "$env:beat") { cd "$env:beat" } else { @@ -35,20 +43,11 @@ New-Item -ItemType directory -Path build\coverage | Out-Null New-Item -ItemType directory -Path build\system-tests | Out-Null New-Item -ItemType directory -Path build\system-tests\run | Out-Null -exec { go get -u github.com/jstemmer/go-junit-report } +echo "Building fields.yml" +exec { mage fields } echo "Building $env:beat" -exec { go build } "Build FAILURE" - -# always build the libbeat fields -cp ..\libbeat\_meta\fields.common.yml ..\libbeat\_meta\fields.generated.yml -cat ..\libbeat\processors\*\_meta\fields.yml | Out-File -append -encoding UTF8 -filepath ..\libbeat\_meta\fields.generated.yml -cp ..\libbeat\_meta\fields.generated.yml ..\libbeat\fields.yml - -if ($env:beat -eq "metricbeat") { - cp .\_meta\fields.common.yml .\_meta\fields.generated.yml - python .\scripts\fields_collector.py | out-file -append -encoding UTF8 -filepath .\_meta\fields.generated.yml -} +exec { mage build } "Build FAILURE" echo "Unit testing $env:beat" go test -v $(go list ./... | select-string -Pattern "vendor" -NotMatch) 2>&1 | Out-File -encoding UTF8 build/TEST-go-unit.out diff --git a/_beats/dev-tools/mage/fields.go b/_beats/dev-tools/mage/fields.go new file mode 100644 index 00000000000..8f658483ffb --- /dev/null +++ b/_beats/dev-tools/mage/fields.go @@ -0,0 +1,48 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "path/filepath" + + "github.com/magefile/mage/sh" +) + +// GenerateFieldsYAML generates a fields.yml file for a Beat. 
This will include +// the common fields specified by libbeat, the common fields for the Beat, +// and any additional fields.yml files you specify. +// +// fieldsFiles specifies additional directories to search recursively for files +// named fields.yml. The contents of each fields.yml will be included in the +// generated file. +func GenerateFieldsYAML(fieldsFiles ...string) error { + const globalFieldsCmdPath = "libbeat/scripts/cmd/global_fields/main.go" + + beatsDir, err := ElasticBeatsDir() + if err != nil { + return err + } + + globalFieldsCmd := sh.RunCmd("go", "run", + filepath.Join(beatsDir, globalFieldsCmdPath), + "-es_beats_path", beatsDir, + "-beat_path", CWD(), + ) + + return globalFieldsCmd(fieldsFiles...) +} diff --git a/_beats/dev-tools/packaging/templates/common/magefile.go.tmpl b/_beats/dev-tools/packaging/templates/common/magefile.go.tmpl index 1a9822d2491..db7f7132110 100644 --- a/_beats/dev-tools/packaging/templates/common/magefile.go.tmpl +++ b/_beats/dev-tools/packaging/templates/common/magefile.go.tmpl @@ -70,3 +70,8 @@ func TestPackages() error { func Update() error { return sh.Run("make", "update") } + +// Fields generates a fields.yml for the Beat. +func Fields() error { + return mage.GenerateFieldsYAML("protos") +} diff --git a/_beats/libbeat/Makefile b/_beats/libbeat/Makefile index d63820feabc..93b34f77104 100644 --- a/_beats/libbeat/Makefile +++ b/_beats/libbeat/Makefile @@ -1,7 +1,6 @@ BEAT_NAME=libbeat TEST_ENVIRONMENT?=true SYSTEM_TESTS=true -FIELDS_FILE_PATH=processors include scripts/Makefile diff --git a/_beats/libbeat/magefile.go b/_beats/libbeat/magefile.go new file mode 100644 index 00000000000..939eac4adb0 --- /dev/null +++ b/_beats/libbeat/magefile.go @@ -0,0 +1,39 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// +build mage + +package main + +import ( + "github.com/elastic/beats/dev-tools/mage" +) + +// Build builds the Beat binary. +func Build() error { + return mage.Build(mage.DefaultBuildArgs()) +} + +// Clean cleans all generated files and build artifacts. +func Clean() error { + return mage.Clean() +} + +// Fields generates a fields.yml for the Beat. 
+func Fields() error { + return mage.GenerateFieldsYAML("processors") +} diff --git a/_beats/libbeat/scripts/Makefile b/_beats/libbeat/scripts/Makefile index d2b109cbcbb..8e8a0ac3887 100755 --- a/_beats/libbeat/scripts/Makefile +++ b/_beats/libbeat/scripts/Makefile @@ -20,7 +20,9 @@ ELASTIC_LICENSE_FILE?=../licenses/ELASTIC-LICENSE.txt SECCOMP_BINARY?=${BEAT_NAME} SECCOMP_BLACKLIST?=${ES_BEATS}/libbeat/common/seccomp/seccomp-profiler-blacklist.txt SECCOMP_ALLOWLIST?=${ES_BEATS}/libbeat/common/seccomp/seccomp-profiler-allow.txt +MAGE_PRESENT := $(shell command -v mage 2> /dev/null) MAGE_IMPORT_PATH?=github.com/elastic/beats/vendor/github.com/magefile/mage +export MAGE_IMPORT_PATH space:=$() # comma:=, @@ -304,9 +306,8 @@ coverage-report: .PHONY: fields -fields: - @go run ${ES_BEATS}/libbeat/scripts/cmd/global_fields/main.go --es_beats_path $(ES_BEATS) --beat_path $(PWD) $(FIELDS_FILE_PATH) - +fields: mage + mage -v fields .PHONY: libbeat_fields libbeat_fields: @@ -442,7 +443,9 @@ seccomp-package: .PHONY: mage mage: - @go install ${MAGE_IMPORT_PATH} +ifndef MAGE_PRESENT + go install ${MAGE_IMPORT_PATH} +endif .PHONY: release release: mage diff --git a/_beats/libbeat/scripts/cmd/global_fields/main.go b/_beats/libbeat/scripts/cmd/global_fields/main.go index 8bd5b691506..74fc94a603d 100644 --- a/_beats/libbeat/scripts/cmd/global_fields/main.go +++ b/_beats/libbeat/scripts/cmd/global_fields/main.go @@ -31,41 +31,46 @@ func main() { beatPath := flag.String("beat_path", ".", "Path to your Beat") flag.Parse() - beatFieldsPath := flag.Args() + beatFieldsPaths := flag.Args() name := filepath.Base(*beatPath) - err := os.MkdirAll(filepath.Join(*beatPath, "_meta"), 0744) + if *beatPath == "" { + fmt.Fprintf(os.Stderr, "beat_path cannot be empty") + os.Exit(1) + } + + err := os.MkdirAll(filepath.Join(*beatPath, "_meta"), 0755) if err != nil { - fmt.Printf("Cannot create _meta dir for %s: %v\n", name, err) + fmt.Fprintf(os.Stderr, "Cannot create _meta dir for %s: %+v\n", name, err) os.Exit(1) } - if len(beatFieldsPath) == 0 { + if len(beatFieldsPaths) == 0 { fmt.Println("No field files to collect") err = fields.AppendFromLibbeat(*esBeatsPath, *beatPath) if err != nil { - fmt.Printf("Cannot generate global fields.yml for %s: %v\n", name, err) + fmt.Fprintf(os.Stderr, "Cannot generate global fields.yml for %s: %+v\n", name, err) os.Exit(2) } return } - if *beatPath == "" { - fmt.Println("beat_path cannot be empty") - os.Exit(1) - } + var fieldsFiles []*fields.YmlFile + for _, fieldsFilePath := range beatFieldsPaths { + pathToModules := filepath.Join(*beatPath, fieldsFilePath) - pathToModules := filepath.Join(*beatPath, beatFieldsPath[0]) - fieldFiles, err := fields.CollectModuleFiles(pathToModules) - if err != nil { - fmt.Printf("Cannot collect fields.yml files: %v\n", err) - os.Exit(2) + fieldsFile, err := fields.CollectModuleFiles(pathToModules) + if err != nil { + fmt.Fprintf(os.Stderr, "Cannot collect fields.yml files: %+v\n", err) + os.Exit(2) + } + fieldsFiles = append(fieldsFiles, fieldsFile...) 
} - err = fields.Generate(*esBeatsPath, *beatPath, fieldFiles) + err = fields.Generate(*esBeatsPath, *beatPath, fieldsFiles) if err != nil { - fmt.Printf("Cannot generate global fields.yml file for %s: %v\n", name, err) + fmt.Fprintf(os.Stderr, "Cannot generate global fields.yml file for %s: %+v\n", name, err) os.Exit(3) } diff --git a/_beats/libbeat/tests/system/requirements.txt b/_beats/libbeat/tests/system/requirements.txt index a98ac293088..743fdf14c66 100644 --- a/_beats/libbeat/tests/system/requirements.txt +++ b/_beats/libbeat/tests/system/requirements.txt @@ -28,4 +28,4 @@ texttable==0.9.1 urllib3==1.22 websocket-client==0.47.0 parameterized==0.6.1 -jsondiff +jsondiff==1.1.2 diff --git a/_beats/testing/environments/args.yml b/_beats/testing/environments/args.yml index db44bdda79b..033b0db0bac 100644 --- a/_beats/testing/environments/args.yml +++ b/_beats/testing/environments/args.yml @@ -7,4 +7,4 @@ services: args: DOWNLOAD_URL: https://snapshots.elastic.co/downloads ELASTIC_VERSION: 7.0.0-alpha1-SNAPSHOT - CACHE_BUST: 20180501 + CACHE_BUST: 20180723 diff --git a/_beats/testing/environments/docker/kafka/Dockerfile b/_beats/testing/environments/docker/kafka/Dockerfile index 4d9f0d053e2..cda937e2587 100644 --- a/_beats/testing/environments/docker/kafka/Dockerfile +++ b/_beats/testing/environments/docker/kafka/Dockerfile @@ -4,7 +4,7 @@ ENV KAFKA_HOME /kafka # The advertised host is kafka. This means it will not work if container is started locally and connected from localhost to it ENV KAFKA_ADVERTISED_HOST kafka ENV KAFKA_LOGS_DIR="/kafka-logs" -ENV KAFKA_VERSION 1.0.0 +ENV KAFKA_VERSION 1.1.1 ENV _JAVA_OPTIONS "-Djava.net.preferIPv4Stack=true" ENV TERM=linux diff --git a/_beats/vendor/vendor.json b/_beats/vendor/vendor.json index 5221b45453f..c557ced2e25 100644 --- a/_beats/vendor/vendor.json +++ b/_beats/vendor/vendor.json @@ -9,22 +9,21 @@ "revisionTime": "2017-05-24T00:36:31Z" }, { - "checksumSHA1": "wshQ6/MnQx8nH6pgufu4kJEX6JI=", + "checksumSHA1": "rDoYEddGYvQT73l9V8Uqjk7SHAY=", + "origin": "github.com/andrewkroh/xxhash", "path": "github.com/OneOfOne/xxhash", - "revision": "6def279d2ce6c81a79dd1c1be580f03bb216fb8a", - "revisionTime": "2018-05-30T13:49:54Z", - "version": "v1.2.2", - "versionExact": "v1.2.2" + "revision": "2c166c65de755bdafa6ae2959c10ea9df6e8b3e5", + "revisionTime": "2018-07-23T14:32:41Z" }, { - "checksumSHA1": "pH1jOw5Kfigc2tteo6KlaU9+JE8=", + "checksumSHA1": "xSwVjXDGIMoADDte4hBjra6ldGk=", "origin": "github.com/urso/sarama", "path": "github.com/Shopify/sarama", - "revision": "32b4ad5c9537ed14e471779b76713ff65420db39", + "revision": "d1575e4abe04acbbe8ac766320585cdf271dd189", "revisionTime": "2016-11-23T00:27:23Z", "tree": true, - "version": "v1.16.0/enh/offset-replica-id", - "versionExact": "v1.16.0/enh/offset-replica-id" + "version": "v1.17.0/enh/offset-replica-id", + "versionExact": "v1.17.0/enh/offset-replica-id" }, { "checksumSHA1": "DYv6Q1+VfnUVxMwvk5IshAClLvw=", @@ -117,12 +116,10 @@ "revisionTime": "2016-09-02T18:42:37Z" }, { - "checksumSHA1": "R1Q34Pfnt197F/nCOO9kG8c+Z90=", - "path": "github.com/boltdb/bolt", - "revision": "2f1ce7a837dcb8da3ec595b1dac9d0632f0f99e8", - "revisionTime": "2017-07-17T17:11:48Z", - "version": "v1.3.1", - "versionExact": "v1.3.1" + "checksumSHA1": "qFaKrhSla38BRAyaGz2UaZvH/Dk=", + "path": "github.com/coreos/bbolt", + "revision": "af9db2027c98c61ecd8e17caa5bd265792b9b9a2", + "revisionTime": "2018-03-18T00:15:26Z" }, { "checksumSHA1": "dvabztWVQX8f6oMLRyv4dLH+TGY=", @@ -495,12 
+492,6 @@ "version": "v0.0.4", "versionExact": "v0.0.4" }, - { - "path": "github.com/elastic/go-structform/internal/ubjson", - "revision": "v0.0.4", - "version": "v0.0.4", - "versionExact": "v0.0.4" - }, { "checksumSHA1": "s7k0vEuuqkoPXU0FtrD6Y0jxd7U=", "path": "github.com/elastic/go-structform/internal/unsafe", @@ -509,12 +500,6 @@ "version": "v0.0.4", "versionExact": "v0.0.4" }, - { - "path": "github.com/elastic/go-structform/internal/visitors", - "revision": "v0.0.4", - "version": "v0.0.4", - "versionExact": "v0.0.4" - }, { "checksumSHA1": "KTDpLMZRFtSVeUuXdaz5o5ehzfI=", "path": "github.com/elastic/go-structform/json", @@ -638,52 +623,52 @@ "versionExact": "v0.0.1" }, { - "checksumSHA1": "d0qibYdQy5G1YqI5H+xNC0QJ66g=", + "checksumSHA1": "MK8/w0Idj7kRBUiBabARPdm9hOo=", "path": "github.com/elastic/go-ucfg", - "revision": "9c66f5c432b1d25bdb449a1e588d58b5d0cd7268", - "revisionTime": "2018-07-04T14:42:58Z", - "version": "v0.6.0", - "versionExact": "v0.6.0" + "revision": "581f7b1fe9d84f4c18ef0694d6e0eb944a925dae", + "revisionTime": "2018-07-13T14:04:29Z", + "version": "v0.6.1", + "versionExact": "v0.6.1" }, { "checksumSHA1": "X+R/CD8SokJrmlxFTx2nSevRDhQ=", "path": "github.com/elastic/go-ucfg/cfgutil", - "revision": "9c66f5c432b1d25bdb449a1e588d58b5d0cd7268", - "revisionTime": "2018-07-04T14:42:58Z", - "version": "v0.6.0", - "versionExact": "v0.6.0" + "revision": "581f7b1fe9d84f4c18ef0694d6e0eb944a925dae", + "revisionTime": "2018-07-13T14:04:29Z", + "version": "v0.6.1", + "versionExact": "v0.6.1" }, { - "checksumSHA1": "dShGF53hLUufO70RAjju+RT0fHY=", + "checksumSHA1": "zC8mCPW/pPPNcuHQOc/B/Ej1W1U=", "path": "github.com/elastic/go-ucfg/flag", - "revision": "9c66f5c432b1d25bdb449a1e588d58b5d0cd7268", - "revisionTime": "2018-07-04T14:42:58Z", - "version": "v0.6.0", - "versionExact": "v0.6.0" + "revision": "581f7b1fe9d84f4c18ef0694d6e0eb944a925dae", + "revisionTime": "2018-07-13T14:04:29Z", + "version": "v0.6.1", + "versionExact": "v0.6.1" }, { "checksumSHA1": "esXpiQlEvTOUwsE0nNesso8albo=", "path": "github.com/elastic/go-ucfg/internal/parse", - "revision": "9c66f5c432b1d25bdb449a1e588d58b5d0cd7268", - "revisionTime": "2018-07-04T14:42:58Z", - "version": "v0.6.0", - "versionExact": "v0.6.0" + "revision": "581f7b1fe9d84f4c18ef0694d6e0eb944a925dae", + "revisionTime": "2018-07-13T14:04:29Z", + "version": "v0.6.1", + "versionExact": "v0.6.1" }, { "checksumSHA1": "5mXUhhlPdvcAFKiQENInTJWrtQM=", "path": "github.com/elastic/go-ucfg/json", - "revision": "9c66f5c432b1d25bdb449a1e588d58b5d0cd7268", - "revisionTime": "2018-07-04T14:42:58Z", - "version": "v0.6.0", - "versionExact": "v0.6.0" + "revision": "581f7b1fe9d84f4c18ef0694d6e0eb944a925dae", + "revisionTime": "2018-07-13T14:04:29Z", + "version": "v0.6.1", + "versionExact": "v0.6.1" }, { "checksumSHA1": "Bg6vistPQLftv2fEYB7GWwSExv8=", "path": "github.com/elastic/go-ucfg/yaml", - "revision": "9c66f5c432b1d25bdb449a1e588d58b5d0cd7268", - "revisionTime": "2018-07-04T14:42:58Z", - "version": "v0.6.0", - "versionExact": "v0.6.0" + "revision": "581f7b1fe9d84f4c18ef0694d6e0eb944a925dae", + "revisionTime": "2018-07-13T14:04:29Z", + "version": "v0.6.1", + "versionExact": "v0.6.1" }, { "checksumSHA1": "yu/X+qHftvfQlAnjPdYLwrDn2nI=", diff --git a/magefile.go b/magefile.go index 27ef354d1b3..06a30a30a9c 100644 --- a/magefile.go +++ b/magefile.go @@ -99,6 +99,10 @@ func Update() error { return sh.Run("make", "update") } +func Fields() error { + return mage.GenerateFieldsYAML(".") +} + // 
----------------------------------------------------------------------------- // Customizations specific to apm-server. // - readme.md.tmpl used in packages is customized. diff --git a/model/metric/_meta/fields.yml b/model/metric/_meta/fields.yml index e69de29bb2d..dc3b9726f0e 100644 --- a/model/metric/_meta/fields.yml +++ b/model/metric/_meta/fields.yml @@ -0,0 +1,80 @@ +- key: system + title: "APM System Metrics" + description: > + System status metrics, like CPU and memory usage, that are collected from the operating system. + short_config: true + fields: + - name: system + type: group + description: > + `system` contains local system metrics. + fields: + + - name: cpu + type: group + description: > + `cpu` contains local CPU stats. + fields: + - name: total.norm.pct + type: scaled_float + format: percent + description: > + The percentage of CPU time spent by the process since the last event. + This value is normalized by the number of CPU cores and it ranges + from 0 to 100%. + + - name: memory + type: group + description: > + `memory` contains local memory stats. + fields: + - name: total + type: long + format: bytes + description: > + Total memory. + - name: actual + type: group + description: > + Actual memory used and free. + fields: + - name: free + type: long + format: bytes + description: > + Actual free memory in bytes. It is calculated based on the OS. On Linux it consists of the free memory + plus caches and buffers. On OSX it is a sum of free memory and the inactive memory. On Windows, it is equal + to `system.memory.free`. + + - name: process + type: group + description: > + `process` contains process metadata, CPU metrics, and memory metrics. + fields: + - name: cpu + type: group + description: > + `cpu` contains local CPU stats. + fields: + - name: total.norm.pct + type: scaled_float + format: percent + description: > + The percentage of CPU time spent by the process since the last event. + This value is normalized by the number of CPU cores and it ranges + from 0 to 100%. + - name: memory + type: group + description: Memory-specific statistics per process. + prefix: "[float]" + fields: + - name: size + type: long + format: bytes + description: > + The total virtual memory the process has. + - name: rss.bytes + type: long + format: bytes + description: > + The Resident Set Size. The amount of memory the process occupied in main memory (RAM). diff --git a/model/metric/system/_meta/fields.yml b/model/metric/system/_meta/fields.yml deleted file mode 100644 index dc3b9726f0e..00000000000 --- a/model/metric/system/_meta/fields.yml +++ /dev/null @@ -1,80 +0,0 @@ -- key: system - title: "APM System Metrics" - description: > - System status metrics, like CPU and memory usage, that are collected from the operating system. - short_config: true - fields: - - name: system - type: group - description: > - `system` contains local system metrics. - fields: - - - name: cpu - type: group - description: > - `cpu` contains local CPU stats. - fields: - - name: total.norm.pct - type: scaled_float - format: percent - description: > - The percentage of CPU time spent by the process since the last event. - This value is normalized by the number of CPU cores and it ranges - from 0 to 100%. - - - name: memory - type: group - description: > - `memory` contains local memory stats. - fields: - - name: total - type: long - format: bytes - description: > - Total memory. - - name: actual - type: group - description: > - Actual memory used and free. 
- fields: - - name: free - type: long - format: bytes - description: > - Actual free memory in bytes. It is calculated based on the OS. On Linux it consists of the free memory - plus caches and buffers. On OSX it is a sum of free memory and the inactive memory. On Windows, it is equal - to `system.memory.free`. - - - name: process - type: group - description: > - `process` contains process metadata, CPU metrics, and memory metrics. - fields: - - name: cpu - type: group - description: > - `cpu` contains local CPU stats. - fields: - - name: total.norm.pct - type: scaled_float - format: percent - description: > - The percentage of CPU time spent by the process since the last event. - This value is normalized by the number of CPU cores and it ranges - from 0 to 100%. - - name: memory - type: group - description: Memory-specific statistics per process. - prefix: "[float]" - fields: - - name: size - type: long - format: bytes - description: > - The total virtual memory the process has. - - name: rss.bytes - type: long - format: bytes - description: > - The Resident Set Size. The amount of memory the process occupied in main memory (RAM). diff --git a/tests/_meta/fields.yml b/tests/_meta/fields.test.yml similarity index 100% rename from tests/_meta/fields.yml rename to tests/_meta/fields.test.yml diff --git a/tests/fields_test.go b/tests/fields_test.go index f339b173d6d..772088c1f5d 100644 --- a/tests/fields_test.go +++ b/tests/fields_test.go @@ -84,7 +84,7 @@ func TestLoadFields(t *testing.T) { _, err := loadFields("non-existing") assert.NotNil(t, err) - fields, err := loadFields("./_meta/fields.yml") + fields, err := loadFields("./_meta/fields.test.yml") assert.Nil(t, err) expected := NewSet("transaction", "transaction.id", "transaction.context", "exception", "exception.http", "exception.http.url", "exception.http.meta", "exception.stacktrace") flattened := NewSet() @@ -94,7 +94,7 @@ func TestLoadFields(t *testing.T) { func TestFlattenFieldNames(t *testing.T) { - fields, _ := loadFields("./_meta/fields.yml") + fields, _ := loadFields("./_meta/fields.test.yml") expected := NewSet("transaction", "transaction.id", "transaction.context", "exception", "exception.http", "exception.http.url", "exception.http.meta", "exception.stacktrace") diff --git a/vendor/github.com/Shopify/sarama/CHANGELOG.md b/vendor/github.com/Shopify/sarama/CHANGELOG.md index 836841650c3..16d5829c995 100644 --- a/vendor/github.com/Shopify/sarama/CHANGELOG.md +++ b/vendor/github.com/Shopify/sarama/CHANGELOG.md @@ -1,5 +1,43 @@ # Changelog +#### Version 1.17.0 (2018-05-30) + +New Features: + - Add support for gzip compression levels + ([#1044](https://github.com/Shopify/sarama/pull/1044)). + - Add support for Metadata request/response pairs versions v1 to v5 + ([#1047](https://github.com/Shopify/sarama/pull/1047), + [#1069](https://github.com/Shopify/sarama/pull/1069)). + - Add versioning to JoinGroup request/response pairs + ([#1098](https://github.com/Shopify/sarama/pull/1098)) + - Add support for CreatePartitions, DeleteGroups, DeleteRecords request/response pairs + ([#1065](https://github.com/Shopify/sarama/pull/1065), + [#1096](https://github.com/Shopify/sarama/pull/1096), + [#1027](https://github.com/Shopify/sarama/pull/1027)). + - Add `Controller()` method to Client interface + ([#1063](https://github.com/Shopify/sarama/pull/1063)). 
+ +Improvements: + - ConsumerMetadataReq/Resp has been migrated to FindCoordinatorReq/Resp + ([#1010](https://github.com/Shopify/sarama/pull/1010)). + - Expose missing protocol parts: `msgSet` and `recordBatch` + ([#1049](https://github.com/Shopify/sarama/pull/1049)). + - Add support for v1 DeleteTopics Request + ([#1052](https://github.com/Shopify/sarama/pull/1052)). + - Add support for Go 1.10 + ([#1064](https://github.com/Shopify/sarama/pull/1064)). + - Claim support for Kafka 1.1.0 + ([#1073](https://github.com/Shopify/sarama/pull/1073)). + +Bug Fixes: + - Fix FindCoordinatorResponse.encode to allow nil Coordinator + ([#1050](https://github.com/Shopify/sarama/pull/1050), + [#1051](https://github.com/Shopify/sarama/pull/1051)). + - Clear all metadata when we have the latest topic info + ([#1033](https://github.com/Shopify/sarama/pull/1033)). + - Make `PartitionConsumer.Close` idempotent + ([#1092](https://github.com/Shopify/sarama/pull/1092)). + #### Version 1.16.0 (2018-02-12) New Features: diff --git a/vendor/github.com/Shopify/sarama/LICENSE b/vendor/github.com/Shopify/sarama/LICENSE index 8121b63b1c4..d2bf4352f4c 100644 --- a/vendor/github.com/Shopify/sarama/LICENSE +++ b/vendor/github.com/Shopify/sarama/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2013 Evan Huus +Copyright (c) 2013 Shopify Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/vendor/github.com/Shopify/sarama/Makefile b/vendor/github.com/Shopify/sarama/Makefile index 58a39e4f34d..b9a453dd29c 100644 --- a/vendor/github.com/Shopify/sarama/Makefile +++ b/vendor/github.com/Shopify/sarama/Makefile @@ -4,7 +4,7 @@ default: fmt vet errcheck test test: echo "" > coverage.txt for d in `go list ./... | grep -v vendor`; do \ - go test -v -timeout 60s -race -coverprofile=profile.out -covermode=atomic $$d; \ + go test -p 1 -v -timeout 90s -race -coverprofile=profile.out -covermode=atomic $$d || exit 1; \ if [ -f profile.out ]; then \ cat profile.out >> coverage.txt; \ rm profile.out; \ @@ -14,8 +14,9 @@ test: vet: go vet ./... +# See https://github.com/kisielk/errcheck/pull/141 for details on ignorepkg errcheck: - errcheck github.com/Shopify/sarama/... + errcheck -ignorepkg fmt github.com/Shopify/sarama/... fmt: @if [ -n "$$(go fmt ./...)" ]; then echo 'Please run go fmt on your code.' && exit 1; fi diff --git a/vendor/github.com/Shopify/sarama/README.md b/vendor/github.com/Shopify/sarama/README.md index 28431f13eb8..4fc0cc600f8 100644 --- a/vendor/github.com/Shopify/sarama/README.md +++ b/vendor/github.com/Shopify/sarama/README.md @@ -21,7 +21,7 @@ You might also want to look at the [Frequently Asked Questions](https://github.c Sarama provides a "2 releases + 2 months" compatibility guarantee: we support the two latest stable releases of Kafka and Go, and we provide a two month grace period for older releases. This means we currently officially support -Go 1.9 and 1.8, and Kafka 1.0 through 0.10, although older releases are +Go 1.8 through 1.10, and Kafka 0.11 through 1.1, although older releases are still likely to work. Sarama follows semantic versioning and provides API stability via the gopkg.in service. 
diff --git a/vendor/github.com/Shopify/sarama/broker.go b/vendor/github.com/Shopify/sarama/broker.go index b759f8f7841..d836bee6d86 100644 --- a/vendor/github.com/Shopify/sarama/broker.go +++ b/vendor/github.com/Shopify/sarama/broker.go @@ -18,6 +18,7 @@ import ( type Broker struct { id int32 addr string + rack *string conf *Config correlationID int32 @@ -230,6 +231,18 @@ func (b *Broker) GetConsumerMetadata(request *ConsumerMetadataRequest) (*Consume return response, nil } +func (b *Broker) FindCoordinator(request *FindCoordinatorRequest) (*FindCoordinatorResponse, error) { + response := new(FindCoordinatorResponse) + + err := b.sendAndReceive(request, response) + + if err != nil { + return nil, err + } + + return response, nil +} + func (b *Broker) GetAvailableOffsets(request *OffsetRequest) (*OffsetResponse, error) { response := new(OffsetResponse) @@ -373,6 +386,17 @@ func (b *Broker) ApiVersions(request *ApiVersionsRequest) (*ApiVersionsResponse, return response, nil } +func (b *Broker) CreatePartitions(request *CreatePartitionsRequest) (*CreatePartitionsResponse, error) { + response := new(CreatePartitionsResponse) + + err := b.sendAndReceive(request, response) + if err != nil { + return nil, err + } + + return response, nil +} + func (b *Broker) CreateTopics(request *CreateTopicsRequest) (*CreateTopicsResponse, error) { response := new(CreateTopicsResponse) @@ -395,6 +419,17 @@ func (b *Broker) DeleteTopics(request *DeleteTopicsRequest) (*DeleteTopicsRespon return response, nil } +func (b *Broker) DeleteRecords(request *DeleteRecordsRequest) (*DeleteRecordsResponse, error) { + response := new(DeleteRecordsResponse) + + err := b.sendAndReceive(request, response) + if err != nil { + return nil, err + } + + return response, nil +} + func (b *Broker) DescribeAcls(request *DescribeAclsRequest) (*DescribeAclsResponse, error) { response := new(DescribeAclsResponse) @@ -504,6 +539,17 @@ func (b *Broker) AlterConfigs(request *AlterConfigsRequest) (*AlterConfigsRespon return response, nil } + +func (b *Broker) DeleteGroups(request *DeleteGroupsRequest) (*DeleteGroupsResponse, error) { + response := new(DeleteGroupsResponse) + + if err := b.sendAndReceive(request, response); err != nil { + return nil, err + } + + return response, nil +} + func (b *Broker) send(rb protocolBody, promiseResponse bool) (*responsePromise, error) { b.lock.Lock() defer b.lock.Unlock() @@ -569,7 +615,7 @@ func (b *Broker) sendAndReceive(req protocolBody, res versionedDecoder) error { } } -func (b *Broker) decode(pd packetDecoder) (err error) { +func (b *Broker) decode(pd packetDecoder, version int16) (err error) { b.id, err = pd.getInt32() if err != nil { return err @@ -585,6 +631,13 @@ func (b *Broker) decode(pd packetDecoder) (err error) { return err } + if version >= 1 { + b.rack, err = pd.getNullableString() + if err != nil { + return err + } + } + b.addr = net.JoinHostPort(host, fmt.Sprint(port)) if _, _, err := net.SplitHostPort(b.addr); err != nil { return err @@ -593,7 +646,7 @@ func (b *Broker) decode(pd packetDecoder) (err error) { return nil } -func (b *Broker) encode(pe packetEncoder) (err error) { +func (b *Broker) encode(pe packetEncoder, version int16) (err error) { host, portstr, err := net.SplitHostPort(b.addr) if err != nil { @@ -613,6 +666,13 @@ func (b *Broker) encode(pe packetEncoder) (err error) { pe.putInt32(int32(port)) + if version >= 1 { + err = pe.putNullableString(b.rack) + if err != nil { + return err + } + } + return nil } diff --git 
a/vendor/github.com/Shopify/sarama/client.go b/vendor/github.com/Shopify/sarama/client.go index 3dbfc4b06ff..019cb43735a 100644 --- a/vendor/github.com/Shopify/sarama/client.go +++ b/vendor/github.com/Shopify/sarama/client.go @@ -17,6 +17,9 @@ type Client interface { // altered after it has been created. Config() *Config + // Controller returns the cluster controller broker. + Controller() (*Broker, error) + // Brokers returns the current set of active brokers as retrieved from cluster metadata. Brokers() []*Broker @@ -97,6 +100,7 @@ type client struct { seedBrokers []*Broker deadSeeds []*Broker + controllerID int32 // cluster controller broker id brokers map[int32]*Broker // maps broker ids to brokers metadata map[string]map[int32]*PartitionMetadata // maps topics to partition ids to metadata coordinators map[string]int32 // Maps consumer group names to coordinating broker IDs @@ -379,6 +383,27 @@ func (client *client) GetOffset(topic string, partitionID int32, time int64) (in return offset, err } +func (client *client) Controller() (*Broker, error) { + if client.Closed() { + return nil, ErrClosedClient + } + + controller := client.cachedController() + if controller == nil { + if err := client.refreshMetadata(); err != nil { + return nil, err + } + controller = client.cachedController() + } + + if controller == nil { + return nil, ErrControllerNotAvailable + } + + _ = controller.Open(client.conf) + return controller, nil +} + func (client *client) Coordinator(consumerGroup string) (*Broker, error) { if client.Closed() { return nil, ErrClosedClient @@ -607,20 +632,7 @@ func (client *client) backgroundMetadataUpdater() { for { select { case <-ticker.C: - topics := []string{} - if !client.conf.Metadata.Full { - if specificTopics, err := client.Topics(); err != nil { - Logger.Println("Client background metadata topic load:", err) - break - } else if len(specificTopics) == 0 { - Logger.Println("Client background metadata update: no specific topics to update") - break - } else { - topics = specificTopics - } - } - - if err := client.RefreshMetadata(topics...); err != nil { + if err := client.refreshMetadata(); err != nil { Logger.Println("Client background metadata update:", err) } case <-client.closer: @@ -629,6 +641,26 @@ func (client *client) backgroundMetadataUpdater() { } } +func (client *client) refreshMetadata() error { + topics := []string{} + + if !client.conf.Metadata.Full { + if specificTopics, err := client.Topics(); err != nil { + return err + } else if len(specificTopics) == 0 { + return ErrNoTopicsToUpdateMetadata + } else { + topics = specificTopics + } + } + + if err := client.RefreshMetadata(topics...); err != nil { + return err + } + + return nil +} + func (client *client) tryRefreshMetadata(topics []string, attemptsRemaining int) error { retry := func(err error) error { if attemptsRemaining > 0 { @@ -645,12 +677,18 @@ func (client *client) tryRefreshMetadata(topics []string, attemptsRemaining int) } else { Logger.Printf("client/metadata fetching metadata for all topics from broker %s\n", broker.addr) } - response, err := broker.GetMetadata(&MetadataRequest{Topics: topics}) + + req := &MetadataRequest{Topics: topics} + if client.conf.Version.IsAtLeast(V0_10_0_0) { + req.Version = 1 + } + response, err := broker.GetMetadata(req) switch err.(type) { case nil: + allKnownMetaData := len(topics) == 0 // valid response, use it - shouldRetry, err := client.updateMetadata(response) + shouldRetry, err := client.updateMetadata(response, allKnownMetaData) 
if shouldRetry { Logger.Println("client/metadata found some partitions to be leaderless") return retry(err) // note: err can be nil @@ -674,7 +712,7 @@ func (client *client) tryRefreshMetadata(topics []string, attemptsRemaining int) } // if no fatal error, returns a list of topics that need retrying due to ErrLeaderNotAvailable -func (client *client) updateMetadata(data *MetadataResponse) (retry bool, err error) { +func (client *client) updateMetadata(data *MetadataResponse, allKnownMetaData bool) (retry bool, err error) { client.lock.Lock() defer client.lock.Unlock() @@ -686,6 +724,12 @@ func (client *client) updateMetadata(data *MetadataResponse) (retry bool, err er client.registerBroker(broker) } + client.controllerID = data.ControllerID + + if allKnownMetaData { + client.metadata = make(map[string]map[int32]*PartitionMetadata) + client.cachedPartitionsResults = make(map[string][maxPartitionIndex][]int32) + } for _, topic := range data.Topics { delete(client.metadata, topic.Name) delete(client.cachedPartitionsResults, topic.Name) @@ -735,8 +779,15 @@ func (client *client) cachedCoordinator(consumerGroup string) *Broker { return nil } -func (client *client) getConsumerMetadata(consumerGroup string, attemptsRemaining int) (*ConsumerMetadataResponse, error) { - retry := func(err error) (*ConsumerMetadataResponse, error) { +func (client *client) cachedController() *Broker { + client.lock.RLock() + defer client.lock.RUnlock() + + return client.brokers[client.controllerID] +} + +func (client *client) getConsumerMetadata(consumerGroup string, attemptsRemaining int) (*FindCoordinatorResponse, error) { + retry := func(err error) (*FindCoordinatorResponse, error) { if attemptsRemaining > 0 { Logger.Printf("client/coordinator retrying after %dms... (%d attempts remaining)\n", client.conf.Metadata.Retry.Backoff/time.Millisecond, attemptsRemaining) time.Sleep(client.conf.Metadata.Retry.Backoff) @@ -748,10 +799,11 @@ func (client *client) getConsumerMetadata(consumerGroup string, attemptsRemainin for broker := client.any(); broker != nil; broker = client.any() { Logger.Printf("client/coordinator requesting coordinator for consumergroup %s from %s\n", consumerGroup, broker.Addr()) - request := new(ConsumerMetadataRequest) - request.ConsumerGroup = consumerGroup + request := new(FindCoordinatorRequest) + request.CoordinatorKey = consumerGroup + request.CoordinatorType = CoordinatorGroup - response, err := broker.GetConsumerMetadata(request) + response, err := broker.FindCoordinator(request) if err != nil { Logger.Printf("client/coordinator request to broker %s failed: %s\n", broker.Addr(), err) diff --git a/vendor/github.com/Shopify/sarama/config.go b/vendor/github.com/Shopify/sarama/config.go index 29ea5c2b36e..a564b5c23e4 100644 --- a/vendor/github.com/Shopify/sarama/config.go +++ b/vendor/github.com/Shopify/sarama/config.go @@ -1,7 +1,10 @@ package sarama import ( + "compress/gzip" "crypto/tls" + "fmt" + "io/ioutil" "regexp" "time" @@ -99,6 +102,10 @@ type Config struct { // The type of compression to use on messages (defaults to no compression). // Similar to `compression.codec` setting of the JVM producer. Compression CompressionCodec + // The level of compression to use on messages. The meaning depends + // on the actual compression type used and defaults to default compression + // level for the codec. + CompressionLevel int // Generates partitioners for choosing the partition to send messages to // (defaults to hashing the message key). 
Similar to the `partitioner.class` // setting for the JVM producer. @@ -290,6 +297,7 @@ func NewConfig() *Config { c.Producer.Retry.Max = 3 c.Producer.Retry.Backoff = 100 * time.Millisecond c.Producer.Return.Errors = true + c.Producer.CompressionLevel = CompressionLevelDefault c.Consumer.Fetch.Min = 1 c.Consumer.Fetch.Default = 1024 * 1024 @@ -302,7 +310,7 @@ func NewConfig() *Config { c.ClientID = defaultClientID c.ChannelBufferSize = 256 - c.Version = minVersion + c.Version = MinVersion c.MetricRegistry = metrics.NewRegistry() return c @@ -409,6 +417,14 @@ func (c *Config) Validate() error { return ConfigurationError("lz4 compression requires Version >= V0_10_0_0") } + if c.Producer.Compression == CompressionGZIP { + if c.Producer.CompressionLevel != CompressionLevelDefault { + if _, err := gzip.NewWriterLevel(ioutil.Discard, c.Producer.CompressionLevel); err != nil { + return ConfigurationError(fmt.Sprintf("gzip compression does not work with level %d: %v", c.Producer.CompressionLevel, err)) + } + } + } + // validate the Consumer values switch { case c.Consumer.Fetch.Min <= 0: diff --git a/vendor/github.com/Shopify/sarama/consumer.go b/vendor/github.com/Shopify/sarama/consumer.go index 48d231cf984..96226ac5bf5 100644 --- a/vendor/github.com/Shopify/sarama/consumer.go +++ b/vendor/github.com/Shopify/sarama/consumer.go @@ -310,6 +310,7 @@ type partitionConsumer struct { trigger, dying chan none responseResult error + closeOnce sync.Once fetchSize int32 offset int64 @@ -412,7 +413,9 @@ func (child *partitionConsumer) AsyncClose() { // the dispatcher to exit its loop, which removes it from the consumer then closes its 'messages' and // 'errors' channel (alternatively, if the child is already at the dispatcher for some reason, that will // also just close itself) - close(child.dying) + child.closeOnce.Do(func() { + close(child.dying) + }) } func (child *partitionConsumer) Close() error { @@ -461,7 +464,6 @@ feederLoop: child.messages <- msg } child.broker.input <- child - expiryTicker.Stop() continue feederLoop } else { // current message has not been sent, return to select @@ -482,9 +484,6 @@ feederLoop: func (child *partitionConsumer) parseMessages(msgSet *MessageSet) ([]*ConsumerMessage, error) { var messages []*ConsumerMessage - var incomplete bool - prelude := true - for _, msgBlock := range msgSet.Messages { for _, msg := range msgBlock.Messages() { offset := msg.Offset @@ -492,29 +491,22 @@ func (child *partitionConsumer) parseMessages(msgSet *MessageSet) ([]*ConsumerMe baseOffset := msgBlock.Offset - msgBlock.Messages()[len(msgBlock.Messages())-1].Offset offset += baseOffset } - if prelude && offset < child.offset { + if offset < child.offset { continue } - prelude = false - - if offset >= child.offset { - messages = append(messages, &ConsumerMessage{ - Topic: child.topic, - Partition: child.partition, - Key: msg.Msg.Key, - Value: msg.Msg.Value, - Offset: offset, - Timestamp: msg.Msg.Timestamp, - BlockTimestamp: msgBlock.Msg.Timestamp, - }) - child.offset = offset + 1 - } else { - incomplete = true - } + messages = append(messages, &ConsumerMessage{ + Topic: child.topic, + Partition: child.partition, + Key: msg.Msg.Key, + Value: msg.Msg.Value, + Offset: offset, + Timestamp: msg.Msg.Timestamp, + BlockTimestamp: msgBlock.Msg.Timestamp, + }) + child.offset = offset + 1 } } - - if incomplete || len(messages) == 0 { + if len(messages) == 0 { return nil, ErrIncompleteResponse } return messages, nil @@ -522,42 +514,25 @@ func (child *partitionConsumer) 
parseMessages(msgSet *MessageSet) ([]*ConsumerMe func (child *partitionConsumer) parseRecords(batch *RecordBatch) ([]*ConsumerMessage, error) { var messages []*ConsumerMessage - var incomplete bool - prelude := true - originalOffset := child.offset - for _, rec := range batch.Records { offset := batch.FirstOffset + rec.OffsetDelta - if prelude && offset < child.offset { + if offset < child.offset { continue } - prelude = false - - if offset >= child.offset { - messages = append(messages, &ConsumerMessage{ - Topic: child.topic, - Partition: child.partition, - Key: rec.Key, - Value: rec.Value, - Offset: offset, - Timestamp: batch.FirstTimestamp.Add(rec.TimestampDelta), - Headers: rec.Headers, - }) - child.offset = offset + 1 - } else { - incomplete = true - } - } - - if incomplete { + messages = append(messages, &ConsumerMessage{ + Topic: child.topic, + Partition: child.partition, + Key: rec.Key, + Value: rec.Value, + Offset: offset, + Timestamp: batch.FirstTimestamp.Add(rec.TimestampDelta), + Headers: rec.Headers, + }) + child.offset = offset + 1 + } + if len(messages) == 0 { return nil, ErrIncompleteResponse } - - child.offset = batch.FirstOffset + int64(batch.LastOffsetDelta) + 1 - if child.offset <= originalOffset { - return nil, ErrConsumerOffsetNotAdvanced - } - return messages, nil } @@ -610,14 +585,14 @@ func (child *partitionConsumer) parseResponse(response *FetchResponse) ([]*Consu switch records.recordsType { case legacyRecords: - messageSetMessages, err := child.parseMessages(records.msgSet) + messageSetMessages, err := child.parseMessages(records.MsgSet) if err != nil { return nil, err } messages = append(messages, messageSetMessages...) case defaultRecords: - recordBatchMessages, err := child.parseRecords(records.recordBatch) + recordBatchMessages, err := child.parseRecords(records.RecordBatch) if err != nil { return nil, err } diff --git a/vendor/github.com/Shopify/sarama/consumer_metadata_request.go b/vendor/github.com/Shopify/sarama/consumer_metadata_request.go index 483be3354df..4de45e7bf50 100644 --- a/vendor/github.com/Shopify/sarama/consumer_metadata_request.go +++ b/vendor/github.com/Shopify/sarama/consumer_metadata_request.go @@ -5,12 +5,19 @@ type ConsumerMetadataRequest struct { } func (r *ConsumerMetadataRequest) encode(pe packetEncoder) error { - return pe.putString(r.ConsumerGroup) + tmp := new(FindCoordinatorRequest) + tmp.CoordinatorKey = r.ConsumerGroup + tmp.CoordinatorType = CoordinatorGroup + return tmp.encode(pe) } func (r *ConsumerMetadataRequest) decode(pd packetDecoder, version int16) (err error) { - r.ConsumerGroup, err = pd.getString() - return err + tmp := new(FindCoordinatorRequest) + if err := tmp.decode(pd, version); err != nil { + return err + } + r.ConsumerGroup = tmp.CoordinatorKey + return nil } func (r *ConsumerMetadataRequest) key() int16 { diff --git a/vendor/github.com/Shopify/sarama/consumer_metadata_response.go b/vendor/github.com/Shopify/sarama/consumer_metadata_response.go index 6b9632bbafe..442cbde7ac0 100644 --- a/vendor/github.com/Shopify/sarama/consumer_metadata_response.go +++ b/vendor/github.com/Shopify/sarama/consumer_metadata_response.go @@ -14,20 +14,18 @@ type ConsumerMetadataResponse struct { } func (r *ConsumerMetadataResponse) decode(pd packetDecoder, version int16) (err error) { - tmp, err := pd.getInt16() - if err != nil { - return err - } - r.Err = KError(tmp) + tmp := new(FindCoordinatorResponse) - coordinator := new(Broker) - if err := 
coordinator.decode(pd); err != nil { + if err := tmp.decode(pd, version); err != nil { return err } - if coordinator.addr == ":0" { + + r.Err = tmp.Err + + r.Coordinator = tmp.Coordinator + if tmp.Coordinator == nil { return nil } - r.Coordinator = coordinator // this can all go away in 2.0, but we have to fill in deprecated fields to maintain // backwards compatibility @@ -47,28 +45,22 @@ func (r *ConsumerMetadataResponse) decode(pd packetDecoder, version int16) (err } func (r *ConsumerMetadataResponse) encode(pe packetEncoder) error { - pe.putInt16(int16(r.Err)) - if r.Coordinator != nil { - host, portstr, err := net.SplitHostPort(r.Coordinator.Addr()) - if err != nil { - return err - } - port, err := strconv.ParseInt(portstr, 10, 32) - if err != nil { - return err - } - pe.putInt32(r.Coordinator.ID()) - if err := pe.putString(host); err != nil { - return err - } - pe.putInt32(int32(port)) - return nil + if r.Coordinator == nil { + r.Coordinator = new(Broker) + r.Coordinator.id = r.CoordinatorID + r.Coordinator.addr = net.JoinHostPort(r.CoordinatorHost, strconv.Itoa(int(r.CoordinatorPort))) + } + + tmp := &FindCoordinatorResponse{ + Version: 0, + Err: r.Err, + Coordinator: r.Coordinator, } - pe.putInt32(r.CoordinatorID) - if err := pe.putString(r.CoordinatorHost); err != nil { + + if err := tmp.encode(pe); err != nil { return err } - pe.putInt32(r.CoordinatorPort) + return nil } diff --git a/vendor/github.com/Shopify/sarama/delete_groups_request.go b/vendor/github.com/Shopify/sarama/delete_groups_request.go new file mode 100644 index 00000000000..305a324ac2d --- /dev/null +++ b/vendor/github.com/Shopify/sarama/delete_groups_request.go @@ -0,0 +1,30 @@ +package sarama + +type DeleteGroupsRequest struct { + Groups []string +} + +func (r *DeleteGroupsRequest) encode(pe packetEncoder) error { + return pe.putStringArray(r.Groups) +} + +func (r *DeleteGroupsRequest) decode(pd packetDecoder, version int16) (err error) { + r.Groups, err = pd.getStringArray() + return +} + +func (r *DeleteGroupsRequest) key() int16 { + return 42 +} + +func (r *DeleteGroupsRequest) version() int16 { + return 0 +} + +func (r *DeleteGroupsRequest) requiredVersion() KafkaVersion { + return V1_1_0_0 +} + +func (r *DeleteGroupsRequest) AddGroup(group string) { + r.Groups = append(r.Groups, group) +} diff --git a/vendor/github.com/Shopify/sarama/delete_groups_response.go b/vendor/github.com/Shopify/sarama/delete_groups_response.go new file mode 100644 index 00000000000..c067ebb42b0 --- /dev/null +++ b/vendor/github.com/Shopify/sarama/delete_groups_response.go @@ -0,0 +1,70 @@ +package sarama + +import ( + "time" +) + +type DeleteGroupsResponse struct { + ThrottleTime time.Duration + GroupErrorCodes map[string]KError +} + +func (r *DeleteGroupsResponse) encode(pe packetEncoder) error { + pe.putInt32(int32(r.ThrottleTime / time.Millisecond)) + + if err := pe.putArrayLength(len(r.GroupErrorCodes)); err != nil { + return err + } + for groupID, errorCode := range r.GroupErrorCodes { + if err := pe.putString(groupID); err != nil { + return err + } + pe.putInt16(int16(errorCode)) + } + + return nil +} + +func (r *DeleteGroupsResponse) decode(pd packetDecoder, version int16) error { + throttleTime, err := pd.getInt32() + if err != nil { + return err + } + r.ThrottleTime = time.Duration(throttleTime) * time.Millisecond + + n, err := pd.getArrayLength() + if err != nil { + return err + } + if n == 0 { + return nil + } + + r.GroupErrorCodes = make(map[string]KError, n) + for i := 0; 
i < n; i++ { + groupID, err := pd.getString() + if err != nil { + return err + } + errorCode, err := pd.getInt16() + if err != nil { + return err + } + + r.GroupErrorCodes[groupID] = KError(errorCode) + } + + return nil +} + +func (r *DeleteGroupsResponse) key() int16 { + return 42 +} + +func (r *DeleteGroupsResponse) version() int16 { + return 0 +} + +func (r *DeleteGroupsResponse) requiredVersion() KafkaVersion { + return V1_1_0_0 +} diff --git a/vendor/github.com/Shopify/sarama/delete_records_request.go b/vendor/github.com/Shopify/sarama/delete_records_request.go new file mode 100644 index 00000000000..93efafd4d0b --- /dev/null +++ b/vendor/github.com/Shopify/sarama/delete_records_request.go @@ -0,0 +1,126 @@ +package sarama + +import ( + "sort" + "time" +) + +// request message format is: +// [topic] timeout(int32) +// where topic is: +// name(string) [partition] +// where partition is: +// id(int32) offset(int64) + +type DeleteRecordsRequest struct { + Topics map[string]*DeleteRecordsRequestTopic + Timeout time.Duration +} + +func (d *DeleteRecordsRequest) encode(pe packetEncoder) error { + if err := pe.putArrayLength(len(d.Topics)); err != nil { + return err + } + keys := make([]string, 0, len(d.Topics)) + for topic := range d.Topics { + keys = append(keys, topic) + } + sort.Strings(keys) + for _, topic := range keys { + if err := pe.putString(topic); err != nil { + return err + } + if err := d.Topics[topic].encode(pe); err != nil { + return err + } + } + pe.putInt32(int32(d.Timeout / time.Millisecond)) + + return nil +} + +func (d *DeleteRecordsRequest) decode(pd packetDecoder, version int16) error { + n, err := pd.getArrayLength() + if err != nil { + return err + } + + if n > 0 { + d.Topics = make(map[string]*DeleteRecordsRequestTopic, n) + for i := 0; i < n; i++ { + topic, err := pd.getString() + if err != nil { + return err + } + details := new(DeleteRecordsRequestTopic) + if err = details.decode(pd, version); err != nil { + return err + } + d.Topics[topic] = details + } + } + + timeout, err := pd.getInt32() + if err != nil { + return err + } + d.Timeout = time.Duration(timeout) * time.Millisecond + + return nil +} + +func (d *DeleteRecordsRequest) key() int16 { + return 21 +} + +func (d *DeleteRecordsRequest) version() int16 { + return 0 +} + +func (d *DeleteRecordsRequest) requiredVersion() KafkaVersion { + return V0_11_0_0 +} + +type DeleteRecordsRequestTopic struct { + PartitionOffsets map[int32]int64 // partition => offset +} + +func (t *DeleteRecordsRequestTopic) encode(pe packetEncoder) error { + if err := pe.putArrayLength(len(t.PartitionOffsets)); err != nil { + return err + } + keys := make([]int32, 0, len(t.PartitionOffsets)) + for partition := range t.PartitionOffsets { + keys = append(keys, partition) + } + sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] }) + for _, partition := range keys { + pe.putInt32(partition) + pe.putInt64(t.PartitionOffsets[partition]) + } + return nil +} + +func (t *DeleteRecordsRequestTopic) decode(pd packetDecoder, version int16) error { + n, err := pd.getArrayLength() + if err != nil { + return err + } + + if n > 0 { + t.PartitionOffsets = make(map[int32]int64, n) + for i := 0; i < n; i++ { + partition, err := pd.getInt32() + if err != nil { + return err + } + offset, err := pd.getInt64() + if err != nil { + return err + } + t.PartitionOffsets[partition] = offset + } + } + + return nil +} diff --git a/vendor/github.com/Shopify/sarama/delete_records_response.go 
b/vendor/github.com/Shopify/sarama/delete_records_response.go new file mode 100644 index 00000000000..733a58b6bc3 --- /dev/null +++ b/vendor/github.com/Shopify/sarama/delete_records_response.go @@ -0,0 +1,158 @@ +package sarama + +import ( + "sort" + "time" +) + +// response message format is: +// throttleMs(int32) [topic] +// where topic is: +// name(string) [partition] +// where partition is: +// id(int32) low_watermark(int64) error_code(int16) + +type DeleteRecordsResponse struct { + Version int16 + ThrottleTime time.Duration + Topics map[string]*DeleteRecordsResponseTopic +} + +func (d *DeleteRecordsResponse) encode(pe packetEncoder) error { + pe.putInt32(int32(d.ThrottleTime / time.Millisecond)) + + if err := pe.putArrayLength(len(d.Topics)); err != nil { + return err + } + keys := make([]string, 0, len(d.Topics)) + for topic := range d.Topics { + keys = append(keys, topic) + } + sort.Strings(keys) + for _, topic := range keys { + if err := pe.putString(topic); err != nil { + return err + } + if err := d.Topics[topic].encode(pe); err != nil { + return err + } + } + return nil +} + +func (d *DeleteRecordsResponse) decode(pd packetDecoder, version int16) error { + d.Version = version + + throttleTime, err := pd.getInt32() + if err != nil { + return err + } + d.ThrottleTime = time.Duration(throttleTime) * time.Millisecond + + n, err := pd.getArrayLength() + if err != nil { + return err + } + + if n > 0 { + d.Topics = make(map[string]*DeleteRecordsResponseTopic, n) + for i := 0; i < n; i++ { + topic, err := pd.getString() + if err != nil { + return err + } + details := new(DeleteRecordsResponseTopic) + if err = details.decode(pd, version); err != nil { + return err + } + d.Topics[topic] = details + } + } + + return nil +} + +func (d *DeleteRecordsResponse) key() int16 { + return 21 +} + +func (d *DeleteRecordsResponse) version() int16 { + return 0 +} + +func (d *DeleteRecordsResponse) requiredVersion() KafkaVersion { + return V0_11_0_0 +} + +type DeleteRecordsResponseTopic struct { + Partitions map[int32]*DeleteRecordsResponsePartition +} + +func (t *DeleteRecordsResponseTopic) encode(pe packetEncoder) error { + if err := pe.putArrayLength(len(t.Partitions)); err != nil { + return err + } + keys := make([]int32, 0, len(t.Partitions)) + for partition := range t.Partitions { + keys = append(keys, partition) + } + sort.Slice(keys, func(i, j int) bool { return keys[i] < keys[j] }) + for _, partition := range keys { + pe.putInt32(partition) + if err := t.Partitions[partition].encode(pe); err != nil { + return err + } + } + return nil +} + +func (t *DeleteRecordsResponseTopic) decode(pd packetDecoder, version int16) error { + n, err := pd.getArrayLength() + if err != nil { + return err + } + + if n > 0 { + t.Partitions = make(map[int32]*DeleteRecordsResponsePartition, n) + for i := 0; i < n; i++ { + partition, err := pd.getInt32() + if err != nil { + return err + } + details := new(DeleteRecordsResponsePartition) + if err = details.decode(pd, version); err != nil { + return err + } + t.Partitions[partition] = details + } + } + + return nil +} + +type DeleteRecordsResponsePartition struct { + LowWatermark int64 + Err KError +} + +func (t *DeleteRecordsResponsePartition) encode(pe packetEncoder) error { + pe.putInt64(t.LowWatermark) + pe.putInt16(int16(t.Err)) + return nil +} + +func (t *DeleteRecordsResponsePartition) decode(pd packetDecoder, version int16) error { + lowWatermark, err := pd.getInt64() + if err != nil { + return err + } + t.LowWatermark = lowWatermark + + kErr, 
err := pd.getInt16() + if err != nil { + return err + } + t.Err = KError(kErr) + + return nil +} diff --git a/vendor/github.com/Shopify/sarama/delete_topics_request.go b/vendor/github.com/Shopify/sarama/delete_topics_request.go index ed9089ea478..911f67d31ba 100644 --- a/vendor/github.com/Shopify/sarama/delete_topics_request.go +++ b/vendor/github.com/Shopify/sarama/delete_topics_request.go @@ -3,6 +3,7 @@ package sarama import "time" type DeleteTopicsRequest struct { + Version int16 Topics []string Timeout time.Duration } @@ -25,6 +26,7 @@ func (d *DeleteTopicsRequest) decode(pd packetDecoder, version int16) (err error return err } d.Timeout = time.Duration(timeout) * time.Millisecond + d.Version = version return nil } @@ -33,9 +35,14 @@ func (d *DeleteTopicsRequest) key() int16 { } func (d *DeleteTopicsRequest) version() int16 { - return 0 + return d.Version } func (d *DeleteTopicsRequest) requiredVersion() KafkaVersion { - return V0_10_1_0 + switch d.Version { + case 1: + return V0_11_0_0 + default: + return V0_10_1_0 + } } diff --git a/vendor/github.com/Shopify/sarama/errors.go b/vendor/github.com/Shopify/sarama/errors.go index 54f431a4a91..c578ef5fb43 100644 --- a/vendor/github.com/Shopify/sarama/errors.go +++ b/vendor/github.com/Shopify/sarama/errors.go @@ -41,6 +41,14 @@ var ErrMessageTooLarge = errors.New("kafka: message is larger than Consumer.Fetc // a RecordBatch. var ErrConsumerOffsetNotAdvanced = errors.New("kafka: consumer offset was not advanced after a RecordBatch") +// ErrControllerNotAvailable is returned when server didn't give correct controller id. May be kafka server's version +// is lower than 0.10.0.0. +var ErrControllerNotAvailable = errors.New("kafka: controller is not available") + +// ErrNoTopicsToUpdateMetadata is returned when Meta.Full is set to false but no specific topics were found to update +// the metadata. +var ErrNoTopicsToUpdateMetadata = errors.New("kafka: no specific topics to update metadata") + // PacketEncodingError is returned from a failure while encoding a Kafka packet. This can happen, for example, // if you try to encode a string over 2^15 characters in length, since Kafka's encoding rules do not permit that. 
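Editorial aside (not part of the diff): the two new files above add the DeleteRecords API (key 21, requires Kafka 0.11+). A minimal sketch of building such a request from the exported types; the import path and the surrounding program are assumptions for illustration only.

```go
package main

import (
	"fmt"
	"time"

	"github.com/Shopify/sarama" // assumed upstream module path
)

func main() {
	// Ask the broker to delete every record below offset 42 on partition 0 of "events".
	req := &sarama.DeleteRecordsRequest{
		Topics: map[string]*sarama.DeleteRecordsRequestTopic{
			"events": {PartitionOffsets: map[int32]int64{0: 42}},
		},
		Timeout: 10 * time.Second, // encoded on the wire as an int32 millisecond count
	}
	fmt.Printf("delete request covers %d topic(s)\n", len(req.Topics))
}
```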
type PacketEncodingError struct { diff --git a/vendor/github.com/Shopify/sarama/fetch_request.go b/vendor/github.com/Shopify/sarama/fetch_request.go index 8c8e3a5afc8..462ab8afbb8 100644 --- a/vendor/github.com/Shopify/sarama/fetch_request.go +++ b/vendor/github.com/Shopify/sarama/fetch_request.go @@ -149,7 +149,7 @@ func (r *FetchRequest) requiredVersion() KafkaVersion { case 4: return V0_11_0_0 default: - return minVersion + return MinVersion } } diff --git a/vendor/github.com/Shopify/sarama/fetch_response.go b/vendor/github.com/Shopify/sarama/fetch_response.go index 0e81ad89f43..ae91bb9eb09 100644 --- a/vendor/github.com/Shopify/sarama/fetch_response.go +++ b/vendor/github.com/Shopify/sarama/fetch_response.go @@ -280,7 +280,7 @@ func (r *FetchResponse) requiredVersion() KafkaVersion { case 4: return V0_11_0_0 default: - return minVersion + return MinVersion } } @@ -353,7 +353,7 @@ func (r *FetchResponse) AddMessage(topic string, partition int32, key, value Enc records := newLegacyRecords(&MessageSet{}) frb.RecordsSet = []*Records{&records} } - set := frb.RecordsSet[0].msgSet + set := frb.RecordsSet[0].MsgSet set.Messages = append(set.Messages, msgBlock) } @@ -365,7 +365,7 @@ func (r *FetchResponse) AddRecord(topic string, partition int32, key, value Enco records := newDefaultRecords(&RecordBatch{Version: 2}) frb.RecordsSet = []*Records{&records} } - batch := frb.RecordsSet[0].recordBatch + batch := frb.RecordsSet[0].RecordBatch batch.addRecord(rec) } @@ -375,7 +375,7 @@ func (r *FetchResponse) SetLastOffsetDelta(topic string, partition int32, offset records := newDefaultRecords(&RecordBatch{Version: 2}) frb.RecordsSet = []*Records{&records} } - batch := frb.RecordsSet[0].recordBatch + batch := frb.RecordsSet[0].RecordBatch batch.LastOffsetDelta = offset } diff --git a/vendor/github.com/Shopify/sarama/find_coordinator_request.go b/vendor/github.com/Shopify/sarama/find_coordinator_request.go new file mode 100644 index 00000000000..0ab5cb5ff57 --- /dev/null +++ b/vendor/github.com/Shopify/sarama/find_coordinator_request.go @@ -0,0 +1,61 @@ +package sarama + +type CoordinatorType int8 + +const ( + CoordinatorGroup CoordinatorType = 0 + CoordinatorTransaction CoordinatorType = 1 +) + +type FindCoordinatorRequest struct { + Version int16 + CoordinatorKey string + CoordinatorType CoordinatorType +} + +func (f *FindCoordinatorRequest) encode(pe packetEncoder) error { + if err := pe.putString(f.CoordinatorKey); err != nil { + return err + } + + if f.Version >= 1 { + pe.putInt8(int8(f.CoordinatorType)) + } + + return nil +} + +func (f *FindCoordinatorRequest) decode(pd packetDecoder, version int16) (err error) { + if f.CoordinatorKey, err = pd.getString(); err != nil { + return err + } + + if version >= 1 { + f.Version = version + coordinatorType, err := pd.getInt8() + if err != nil { + return err + } + + f.CoordinatorType = CoordinatorType(coordinatorType) + } + + return nil +} + +func (f *FindCoordinatorRequest) key() int16 { + return 10 +} + +func (f *FindCoordinatorRequest) version() int16 { + return f.Version +} + +func (f *FindCoordinatorRequest) requiredVersion() KafkaVersion { + switch f.Version { + case 1: + return V0_11_0_0 + default: + return V0_8_2_0 + } +} diff --git a/vendor/github.com/Shopify/sarama/find_coordinator_response.go b/vendor/github.com/Shopify/sarama/find_coordinator_response.go new file mode 100644 index 00000000000..9c900e8b774 --- /dev/null +++ 
b/vendor/github.com/Shopify/sarama/find_coordinator_response.go @@ -0,0 +1,92 @@ +package sarama + +import ( + "time" +) + +var NoNode = &Broker{id: -1, addr: ":-1"} + +type FindCoordinatorResponse struct { + Version int16 + ThrottleTime time.Duration + Err KError + ErrMsg *string + Coordinator *Broker +} + +func (f *FindCoordinatorResponse) decode(pd packetDecoder, version int16) (err error) { + if version >= 1 { + f.Version = version + + throttleTime, err := pd.getInt32() + if err != nil { + return err + } + f.ThrottleTime = time.Duration(throttleTime) * time.Millisecond + } + + tmp, err := pd.getInt16() + if err != nil { + return err + } + f.Err = KError(tmp) + + if version >= 1 { + if f.ErrMsg, err = pd.getNullableString(); err != nil { + return err + } + } + + coordinator := new(Broker) + // The version is hardcoded to 0, as version 1 of the Broker-decode + // contains the rack-field which is not present in the FindCoordinatorResponse. + if err := coordinator.decode(pd, 0); err != nil { + return err + } + if coordinator.addr == ":0" { + return nil + } + f.Coordinator = coordinator + + return nil +} + +func (f *FindCoordinatorResponse) encode(pe packetEncoder) error { + if f.Version >= 1 { + pe.putInt32(int32(f.ThrottleTime / time.Millisecond)) + } + + pe.putInt16(int16(f.Err)) + + if f.Version >= 1 { + if err := pe.putNullableString(f.ErrMsg); err != nil { + return err + } + } + + coordinator := f.Coordinator + if coordinator == nil { + coordinator = NoNode + } + if err := coordinator.encode(pe, 0); err != nil { + return err + } + return nil +} + +func (f *FindCoordinatorResponse) key() int16 { + return 10 +} + +func (f *FindCoordinatorResponse) version() int16 { + return f.Version +} + +func (f *FindCoordinatorResponse) requiredVersion() KafkaVersion { + switch f.Version { + case 1: + return V0_11_0_0 + default: + return V0_8_2_0 + } +} diff --git a/vendor/github.com/Shopify/sarama/join_group_request.go b/vendor/github.com/Shopify/sarama/join_group_request.go index 3a7ba17122d..97e9299ea1a 100644 --- a/vendor/github.com/Shopify/sarama/join_group_request.go +++ b/vendor/github.com/Shopify/sarama/join_group_request.go @@ -25,8 +25,10 @@ func (p *GroupProtocol) encode(pe packetEncoder) (err error) { } type JoinGroupRequest struct { + Version int16 GroupId string SessionTimeout int32 + RebalanceTimeout int32 MemberId string ProtocolType string GroupProtocols map[string][]byte // deprecated; use OrderedGroupProtocols @@ -38,6 +40,9 @@ func (r *JoinGroupRequest) encode(pe packetEncoder) error { return err } pe.putInt32(r.SessionTimeout) + if r.Version >= 1 { + pe.putInt32(r.RebalanceTimeout) + } if err := pe.putString(r.MemberId); err != nil { return err } @@ -76,6 +81,8 @@ func (r *JoinGroupRequest) encode(pe packetEncoder) error { } func (r *JoinGroupRequest) decode(pd packetDecoder, version int16) (err error) { + r.Version = version + if r.GroupId, err = pd.getString(); err != nil { return } @@ -84,6 +91,12 @@ func (r *JoinGroupRequest) decode(pd packetDecoder, version int16) (err error) { return } + if version >= 1 { + if r.RebalanceTimeout, err = pd.getInt32(); err != nil { + return err + } + } + if r.MemberId, err = pd.getString(); err != nil { return } @@ -118,11 +131,18 @@ func (r *JoinGroupRequest) key() int16 { } func (r *JoinGroupRequest) version() int16 { - return 0 + return r.Version } func (r *JoinGroupRequest) requiredVersion() KafkaVersion { - return V0_9_0_0 + switch r.Version { + case 2: + return V0_11_0_0 + case 1: + return 
V0_10_1_0 + default: + return V0_9_0_0 + } } func (r *JoinGroupRequest) AddGroupProtocol(name string, metadata []byte) { diff --git a/vendor/github.com/Shopify/sarama/join_group_response.go b/vendor/github.com/Shopify/sarama/join_group_response.go index 6d35fe36494..5752acc8aeb 100644 --- a/vendor/github.com/Shopify/sarama/join_group_response.go +++ b/vendor/github.com/Shopify/sarama/join_group_response.go @@ -1,6 +1,8 @@ package sarama type JoinGroupResponse struct { + Version int16 + ThrottleTime int32 Err KError GenerationId int32 GroupProtocol string @@ -22,6 +24,9 @@ func (r *JoinGroupResponse) GetMembers() (map[string]ConsumerGroupMemberMetadata } func (r *JoinGroupResponse) encode(pe packetEncoder) error { + if r.Version >= 2 { + pe.putInt32(r.ThrottleTime) + } pe.putInt16(int16(r.Err)) pe.putInt32(r.GenerationId) @@ -53,6 +58,14 @@ func (r *JoinGroupResponse) encode(pe packetEncoder) error { } func (r *JoinGroupResponse) decode(pd packetDecoder, version int16) (err error) { + r.Version = version + + if version >= 2 { + if r.ThrottleTime, err = pd.getInt32(); err != nil { + return + } + } + kerr, err := pd.getInt16() if err != nil { return err @@ -107,9 +120,16 @@ func (r *JoinGroupResponse) key() int16 { } func (r *JoinGroupResponse) version() int16 { - return 0 + return r.Version } func (r *JoinGroupResponse) requiredVersion() KafkaVersion { - return V0_9_0_0 + switch r.Version { + case 2: + return V0_11_0_0 + case 1: + return V0_10_1_0 + default: + return V0_9_0_0 + } } diff --git a/vendor/github.com/Shopify/sarama/message.go b/vendor/github.com/Shopify/sarama/message.go index bd5650bbc07..fecdbfdef75 100644 --- a/vendor/github.com/Shopify/sarama/message.go +++ b/vendor/github.com/Shopify/sarama/message.go @@ -24,13 +24,28 @@ const ( CompressionLZ4 CompressionCodec = 3 ) +func (cc CompressionCodec) String() string { + return []string{ + "none", + "gzip", + "snappy", + "lz4", + }[int(cc)] +} + +// CompressionLevelDefault is the constant to use in CompressionLevel +// to have the default compression level for any codec. The value is picked +// that we don't use any existing compression levels. 
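Editorial aside: the message.go hunk that follows adds CompressionLevelDefault and, further down, switches the gzip path to gzip.NewWriterLevel when an explicit level is configured. A standalone sketch of that pattern using only the standard library; the sentinel constant below is a local stand-in, not the sarama one.

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
)

// compressionLevelDefault is a local stand-in for sarama's CompressionLevelDefault sentinel.
const compressionLevelDefault = -1000

// gzipCompress honours an explicit level and otherwise falls back to gzip.NewWriter's
// default, mirroring the branch added to Message.encode.
func gzipCompress(payload []byte, level int) ([]byte, error) {
	var buf bytes.Buffer
	var w *gzip.Writer
	var err error
	if level != compressionLevelDefault {
		if w, err = gzip.NewWriterLevel(&buf, level); err != nil {
			return nil, err // invalid gzip level
		}
	} else {
		w = gzip.NewWriter(&buf)
	}
	if _, err = w.Write(payload); err != nil {
		return nil, err
	}
	if err = w.Close(); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}

func main() {
	out, err := gzipCompress([]byte("hello kafka"), gzip.BestCompression)
	fmt.Println(len(out), err)
}
```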
+const CompressionLevelDefault = -1000 + type Message struct { - Codec CompressionCodec // codec used to compress the message contents - Key []byte // the message key, may be nil - Value []byte // the message contents - Set *MessageSet // the message set a message might wrap - Version int8 // v1 requires Kafka 0.10 - Timestamp time.Time // the timestamp of the message (version 1+ only) + Codec CompressionCodec // codec used to compress the message contents + CompressionLevel int // compression level + Key []byte // the message key, may be nil + Value []byte // the message contents + Set *MessageSet // the message set a message might wrap + Version int8 // v1 requires Kafka 0.10 + Timestamp time.Time // the timestamp of the message (version 1+ only) compressedCache []byte compressedSize int // used for computing the compression ratio metrics @@ -66,7 +81,15 @@ func (m *Message) encode(pe packetEncoder) error { payload = m.Value case CompressionGZIP: var buf bytes.Buffer - writer := gzip.NewWriter(&buf) + var writer *gzip.Writer + if m.CompressionLevel != CompressionLevelDefault { + writer, err = gzip.NewWriterLevel(&buf, m.CompressionLevel) + if err != nil { + return err + } + } else { + writer = gzip.NewWriter(&buf) + } if _, err = writer.Write(m.Value); err != nil { return err } diff --git a/vendor/github.com/Shopify/sarama/metadata_request.go b/vendor/github.com/Shopify/sarama/metadata_request.go index 9a26b55fd03..48adfa28cb9 100644 --- a/vendor/github.com/Shopify/sarama/metadata_request.go +++ b/vendor/github.com/Shopify/sarama/metadata_request.go @@ -1,40 +1,65 @@ package sarama type MetadataRequest struct { - Topics []string + Version int16 + Topics []string + AllowAutoTopicCreation bool } func (r *MetadataRequest) encode(pe packetEncoder) error { - err := pe.putArrayLength(len(r.Topics)) - if err != nil { - return err + if r.Version < 0 || r.Version > 5 { + return PacketEncodingError{"invalid or unsupported MetadataRequest version field"} } - - for i := range r.Topics { - err = pe.putString(r.Topics[i]) + if r.Version == 0 || r.Topics != nil || len(r.Topics) > 0 { + err := pe.putArrayLength(len(r.Topics)) if err != nil { return err } + + for i := range r.Topics { + err = pe.putString(r.Topics[i]) + if err != nil { + return err + } + } + } else { + pe.putInt32(-1) + } + if r.Version > 3 { + pe.putBool(r.AllowAutoTopicCreation) } return nil } func (r *MetadataRequest) decode(pd packetDecoder, version int16) error { - topicCount, err := pd.getArrayLength() + r.Version = version + size, err := pd.getInt32() if err != nil { return err } - if topicCount == 0 { + if size < 0 { return nil - } + } else { + topicCount := size + if topicCount == 0 { + return nil + } - r.Topics = make([]string, topicCount) - for i := range r.Topics { - topic, err := pd.getString() + r.Topics = make([]string, topicCount) + for i := range r.Topics { + topic, err := pd.getString() + if err != nil { + return err + } + r.Topics[i] = topic + } + } + if r.Version > 3 { + autoCreation, err := pd.getBool() if err != nil { return err } - r.Topics[i] = topic + r.AllowAutoTopicCreation = autoCreation } return nil } @@ -44,9 +69,20 @@ func (r *MetadataRequest) key() int16 { } func (r *MetadataRequest) version() int16 { - return 0 + return r.Version } func (r *MetadataRequest) requiredVersion() KafkaVersion { - return minVersion + switch r.Version { + case 1: + return V0_10_0_0 + case 2: + return V0_10_1_0 + case 3, 4: + return V0_11_0_0 + case 5: + return V1_0_0_0 + default: + return 
MinVersion + } } diff --git a/vendor/github.com/Shopify/sarama/metadata_response.go b/vendor/github.com/Shopify/sarama/metadata_response.go index f9d6a4271ed..bf8a67bbc52 100644 --- a/vendor/github.com/Shopify/sarama/metadata_response.go +++ b/vendor/github.com/Shopify/sarama/metadata_response.go @@ -1,14 +1,15 @@ package sarama type PartitionMetadata struct { - Err KError - ID int32 - Leader int32 - Replicas []int32 - Isr []int32 + Err KError + ID int32 + Leader int32 + Replicas []int32 + Isr []int32 + OfflineReplicas []int32 } -func (pm *PartitionMetadata) decode(pd packetDecoder) (err error) { +func (pm *PartitionMetadata) decode(pd packetDecoder, version int16) (err error) { tmp, err := pd.getInt16() if err != nil { return err @@ -35,10 +36,17 @@ func (pm *PartitionMetadata) decode(pd packetDecoder) (err error) { return err } + if version >= 5 { + pm.OfflineReplicas, err = pd.getInt32Array() + if err != nil { + return err + } + } + return nil } -func (pm *PartitionMetadata) encode(pe packetEncoder) (err error) { +func (pm *PartitionMetadata) encode(pe packetEncoder, version int16) (err error) { pe.putInt16(int16(pm.Err)) pe.putInt32(pm.ID) pe.putInt32(pm.Leader) @@ -53,16 +61,24 @@ func (pm *PartitionMetadata) encode(pe packetEncoder) (err error) { return err } + if version >= 5 { + err = pe.putInt32Array(pm.OfflineReplicas) + if err != nil { + return err + } + } + return nil } type TopicMetadata struct { Err KError Name string + IsInternal bool // Only valid for Version >= 1 Partitions []*PartitionMetadata } -func (tm *TopicMetadata) decode(pd packetDecoder) (err error) { +func (tm *TopicMetadata) decode(pd packetDecoder, version int16) (err error) { tmp, err := pd.getInt16() if err != nil { return err @@ -74,6 +90,13 @@ func (tm *TopicMetadata) decode(pd packetDecoder) (err error) { return err } + if version >= 1 { + tm.IsInternal, err = pd.getBool() + if err != nil { + return err + } + } + n, err := pd.getArrayLength() if err != nil { return err @@ -81,7 +104,7 @@ func (tm *TopicMetadata) decode(pd packetDecoder) (err error) { tm.Partitions = make([]*PartitionMetadata, n) for i := 0; i < n; i++ { tm.Partitions[i] = new(PartitionMetadata) - err = tm.Partitions[i].decode(pd) + err = tm.Partitions[i].decode(pd, version) if err != nil { return err } @@ -90,7 +113,7 @@ func (tm *TopicMetadata) decode(pd packetDecoder) (err error) { return nil } -func (tm *TopicMetadata) encode(pe packetEncoder) (err error) { +func (tm *TopicMetadata) encode(pe packetEncoder, version int16) (err error) { pe.putInt16(int16(tm.Err)) err = pe.putString(tm.Name) @@ -98,13 +121,17 @@ func (tm *TopicMetadata) encode(pe packetEncoder) (err error) { return err } + if version >= 1 { + pe.putBool(tm.IsInternal) + } + err = pe.putArrayLength(len(tm.Partitions)) if err != nil { return err } for _, pm := range tm.Partitions { - err = pm.encode(pe) + err = pm.encode(pe, version) if err != nil { return err } @@ -114,11 +141,24 @@ func (tm *TopicMetadata) encode(pe packetEncoder) (err error) { } type MetadataResponse struct { - Brokers []*Broker - Topics []*TopicMetadata + Version int16 + ThrottleTimeMs int32 + Brokers []*Broker + ClusterID *string + ControllerID int32 + Topics []*TopicMetadata } func (r *MetadataResponse) decode(pd packetDecoder, version int16) (err error) { + r.Version = version + + if version >= 3 { + r.ThrottleTimeMs, err = pd.getInt32() + if err != nil { + return err + } + } + n, err := pd.getArrayLength() if err != nil { return err @@ -127,12 +167,28 @@ func (r 
*MetadataResponse) decode(pd packetDecoder, version int16) (err error) { r.Brokers = make([]*Broker, n) for i := 0; i < n; i++ { r.Brokers[i] = new(Broker) - err = r.Brokers[i].decode(pd) + err = r.Brokers[i].decode(pd, version) + if err != nil { + return err + } + } + + if version >= 2 { + r.ClusterID, err = pd.getNullableString() if err != nil { return err } } + if version >= 1 { + r.ControllerID, err = pd.getInt32() + if err != nil { + return err + } + } else { + r.ControllerID = -1 + } + n, err = pd.getArrayLength() if err != nil { return err @@ -141,7 +197,7 @@ func (r *MetadataResponse) decode(pd packetDecoder, version int16) (err error) { r.Topics = make([]*TopicMetadata, n) for i := 0; i < n; i++ { r.Topics[i] = new(TopicMetadata) - err = r.Topics[i].decode(pd) + err = r.Topics[i].decode(pd, version) if err != nil { return err } @@ -156,18 +212,22 @@ func (r *MetadataResponse) encode(pe packetEncoder) error { return err } for _, broker := range r.Brokers { - err = broker.encode(pe) + err = broker.encode(pe, r.Version) if err != nil { return err } } + if r.Version >= 1 { + pe.putInt32(r.ControllerID) + } + err = pe.putArrayLength(len(r.Topics)) if err != nil { return err } for _, tm := range r.Topics { - err = tm.encode(pe) + err = tm.encode(pe, r.Version) if err != nil { return err } @@ -181,11 +241,22 @@ func (r *MetadataResponse) key() int16 { } func (r *MetadataResponse) version() int16 { - return 0 + return r.Version } func (r *MetadataResponse) requiredVersion() KafkaVersion { - return minVersion + switch r.Version { + case 1: + return V0_10_0_0 + case 2: + return V0_10_1_0 + case 3, 4: + return V0_11_0_0 + case 5: + return V1_0_0_0 + default: + return MinVersion + } } // testing API diff --git a/vendor/github.com/Shopify/sarama/mockresponses.go b/vendor/github.com/Shopify/sarama/mockresponses.go index f79a9d5e9b4..5541d32ec69 100644 --- a/vendor/github.com/Shopify/sarama/mockresponses.go +++ b/vendor/github.com/Shopify/sarama/mockresponses.go @@ -68,9 +68,10 @@ func (mc *MockSequence) For(reqBody versionedDecoder) (res encoder) { // MockMetadataResponse is a `MetadataResponse` builder. type MockMetadataResponse struct { - leaders map[string]map[int32]int32 - brokers map[string]int32 - t TestReporter + controllerID int32 + leaders map[string]map[int32]int32 + brokers map[string]int32 + t TestReporter } func NewMockMetadataResponse(t TestReporter) *MockMetadataResponse { @@ -96,9 +97,17 @@ func (mmr *MockMetadataResponse) SetBroker(addr string, brokerID int32) *MockMet return mmr } +func (mmr *MockMetadataResponse) SetController(brokerID int32) *MockMetadataResponse { + mmr.controllerID = brokerID + return mmr +} + func (mmr *MockMetadataResponse) For(reqBody versionedDecoder) encoder { metadataRequest := reqBody.(*MetadataRequest) - metadataResponse := &MetadataResponse{} + metadataResponse := &MetadataResponse{ + Version: metadataRequest.version(), + ControllerID: mmr.controllerID, + } for addr, brokerID := range mmr.brokers { metadataResponse.AddBroker(addr, brokerID) } @@ -326,6 +335,60 @@ func (mr *MockConsumerMetadataResponse) For(reqBody versionedDecoder) encoder { return res } +// MockFindCoordinatorResponse is a `FindCoordinatorResponse` builder. 
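Editorial aside: the MockFindCoordinatorResponse builder defined below is meant to be registered on sarama's existing MockBroker in tests. A hedged sketch of how that wiring typically looks; NewMockBroker, SetHandlerByMap and the "FindCoordinatorRequest" routing key come from the pre-existing mock-broker API and are assumptions here, not part of this diff.

```go
package sarama_test

import (
	"testing"

	"github.com/Shopify/sarama" // assumed upstream module path
)

func TestGroupCoordinatorLookup(t *testing.T) {
	broker := sarama.NewMockBroker(t, 1)
	defer broker.Close()

	// Route FindCoordinator requests for "my-group" back to this mock broker.
	broker.SetHandlerByMap(map[string]sarama.MockResponse{
		"FindCoordinatorRequest": sarama.NewMockFindCoordinatorResponse(t).
			SetCoordinator(sarama.CoordinatorGroup, "my-group", broker),
	})

	// A client pointed at broker.Addr() would now resolve this broker as the
	// group coordinator for "my-group".
	_ = broker.Addr()
}
```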
+type MockFindCoordinatorResponse struct { + groupCoordinators map[string]interface{} + transCoordinators map[string]interface{} + t TestReporter +} + +func NewMockFindCoordinatorResponse(t TestReporter) *MockFindCoordinatorResponse { + return &MockFindCoordinatorResponse{ + groupCoordinators: make(map[string]interface{}), + transCoordinators: make(map[string]interface{}), + t: t, + } +} + +func (mr *MockFindCoordinatorResponse) SetCoordinator(coordinatorType CoordinatorType, group string, broker *MockBroker) *MockFindCoordinatorResponse { + switch coordinatorType { + case CoordinatorGroup: + mr.groupCoordinators[group] = broker + case CoordinatorTransaction: + mr.transCoordinators[group] = broker + } + return mr +} + +func (mr *MockFindCoordinatorResponse) SetError(coordinatorType CoordinatorType, group string, kerror KError) *MockFindCoordinatorResponse { + switch coordinatorType { + case CoordinatorGroup: + mr.groupCoordinators[group] = kerror + case CoordinatorTransaction: + mr.transCoordinators[group] = kerror + } + return mr +} + +func (mr *MockFindCoordinatorResponse) For(reqBody versionedDecoder) encoder { + req := reqBody.(*FindCoordinatorRequest) + res := &FindCoordinatorResponse{} + var v interface{} + switch req.CoordinatorType { + case CoordinatorGroup: + v = mr.groupCoordinators[req.CoordinatorKey] + case CoordinatorTransaction: + v = mr.transCoordinators[req.CoordinatorKey] + } + switch v := v.(type) { + case *MockBroker: + res.Coordinator = &Broker{id: v.BrokerID(), addr: v.Addr()} + case KError: + res.Err = v + } + return res +} + // MockOffsetCommitResponse is a `OffsetCommitResponse` builder. type MockOffsetCommitResponse struct { errors map[string]map[string]map[int32]KError diff --git a/vendor/github.com/Shopify/sarama/offset_commit_request.go b/vendor/github.com/Shopify/sarama/offset_commit_request.go index b21ea634b02..37e99fbf5b8 100644 --- a/vendor/github.com/Shopify/sarama/offset_commit_request.go +++ b/vendor/github.com/Shopify/sarama/offset_commit_request.go @@ -1,5 +1,7 @@ package sarama +import "errors" + // ReceiveTime is a special value for the timestamp field of Offset Commit Requests which // tells the broker to set the timestamp to the time at which the request was received. // The timestamp is only used if message version 1 is used, which requires kafka 0.8.2. 
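Editorial aside: the offset_commit_request.go hunk that follows adds a small Offset accessor for blocks previously registered with AddBlock. An illustrative sketch; the ConsumerGroup field and the AddBlock signature are taken from the existing API rather than shown in this diff.

```go
package main

import (
	"fmt"

	"github.com/Shopify/sarama" // assumed upstream module path
)

func main() {
	req := &sarama.OffsetCommitRequest{ConsumerGroup: "my-group"}
	req.AddBlock("events", 0, 1234, sarama.ReceiveTime, "checkpoint")

	// The new accessor reads back what AddBlock stored.
	offset, metadata, err := req.Offset("events", 0)
	fmt.Println(offset, metadata, err) // 1234 checkpoint <nil>

	// Unknown topic/partition pairs return the "No such offset" error.
	if _, _, err := req.Offset("missing", 0); err != nil {
		fmt.Println(err)
	}
}
```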
@@ -173,7 +175,7 @@ func (r *OffsetCommitRequest) requiredVersion() KafkaVersion { case 2: return V0_9_0_0 default: - return minVersion + return MinVersion } } @@ -188,3 +190,15 @@ func (r *OffsetCommitRequest) AddBlock(topic string, partitionID int32, offset i r.blocks[topic][partitionID] = &offsetCommitRequestBlock{offset, timestamp, metadata} } + +func (r *OffsetCommitRequest) Offset(topic string, partitionID int32) (int64, string, error) { + partitions := r.blocks[topic] + if partitions == nil { + return 0, "", errors.New("No such offset") + } + block := partitions[partitionID] + if block == nil { + return 0, "", errors.New("No such offset") + } + return block.offset, block.metadata, nil +} diff --git a/vendor/github.com/Shopify/sarama/offset_commit_response.go b/vendor/github.com/Shopify/sarama/offset_commit_response.go index 7f277e7753a..a4b18acdff2 100644 --- a/vendor/github.com/Shopify/sarama/offset_commit_response.go +++ b/vendor/github.com/Shopify/sarama/offset_commit_response.go @@ -81,5 +81,5 @@ func (r *OffsetCommitResponse) version() int16 { } func (r *OffsetCommitResponse) requiredVersion() KafkaVersion { - return minVersion + return MinVersion } diff --git a/vendor/github.com/Shopify/sarama/offset_fetch_request.go b/vendor/github.com/Shopify/sarama/offset_fetch_request.go index b19fe79ba7a..5a05014b481 100644 --- a/vendor/github.com/Shopify/sarama/offset_fetch_request.go +++ b/vendor/github.com/Shopify/sarama/offset_fetch_request.go @@ -68,7 +68,7 @@ func (r *OffsetFetchRequest) requiredVersion() KafkaVersion { case 1: return V0_8_2_0 default: - return minVersion + return MinVersion } } diff --git a/vendor/github.com/Shopify/sarama/offset_fetch_response.go b/vendor/github.com/Shopify/sarama/offset_fetch_response.go index 323220eac97..11e4b1f3fdf 100644 --- a/vendor/github.com/Shopify/sarama/offset_fetch_response.go +++ b/vendor/github.com/Shopify/sarama/offset_fetch_response.go @@ -115,7 +115,7 @@ func (r *OffsetFetchResponse) version() int16 { } func (r *OffsetFetchResponse) requiredVersion() KafkaVersion { - return minVersion + return MinVersion } func (r *OffsetFetchResponse) GetBlock(topic string, partition int32) *OffsetFetchResponseBlock { diff --git a/vendor/github.com/Shopify/sarama/offset_request.go b/vendor/github.com/Shopify/sarama/offset_request.go index a14f71828df..687fef284a2 100644 --- a/vendor/github.com/Shopify/sarama/offset_request.go +++ b/vendor/github.com/Shopify/sarama/offset_request.go @@ -117,7 +117,7 @@ func (r *OffsetRequest) requiredVersion() KafkaVersion { case 1: return V0_10_1_0 default: - return minVersion + return MinVersion } } diff --git a/vendor/github.com/Shopify/sarama/offset_response.go b/vendor/github.com/Shopify/sarama/offset_response.go index 9a9cfe96f3b..8b2193f9a0b 100644 --- a/vendor/github.com/Shopify/sarama/offset_response.go +++ b/vendor/github.com/Shopify/sarama/offset_response.go @@ -155,7 +155,7 @@ func (r *OffsetResponse) requiredVersion() KafkaVersion { case 1: return V0_10_1_0 default: - return minVersion + return MinVersion } } diff --git a/vendor/github.com/Shopify/sarama/produce_request.go b/vendor/github.com/Shopify/sarama/produce_request.go index 0ec4d8d53f9..0c755d02b64 100644 --- a/vendor/github.com/Shopify/sarama/produce_request.go +++ b/vendor/github.com/Shopify/sarama/produce_request.go @@ -113,9 +113,9 @@ func (r 
*ProduceRequest) encode(pe packetEncoder) error { } if metricRegistry != nil { if r.Version >= 3 { - topicRecordCount += updateBatchMetrics(records.recordBatch, compressionRatioMetric, topicCompressionRatioMetric) + topicRecordCount += updateBatchMetrics(records.RecordBatch, compressionRatioMetric, topicCompressionRatioMetric) } else { - topicRecordCount += updateMsgSetMetrics(records.msgSet, compressionRatioMetric, topicCompressionRatioMetric) + topicRecordCount += updateMsgSetMetrics(records.MsgSet, compressionRatioMetric, topicCompressionRatioMetric) } batchSize := int64(pe.offset() - startOffset) batchSizeMetric.Update(batchSize) @@ -215,7 +215,7 @@ func (r *ProduceRequest) requiredVersion() KafkaVersion { case 3: return V0_11_0_0 default: - return minVersion + return MinVersion } } @@ -231,7 +231,7 @@ func (r *ProduceRequest) ensureRecords(topic string, partition int32) { func (r *ProduceRequest) AddMessage(topic string, partition int32, msg *Message) { r.ensureRecords(topic, partition) - set := r.records[topic][partition].msgSet + set := r.records[topic][partition].MsgSet if set == nil { set = new(MessageSet) diff --git a/vendor/github.com/Shopify/sarama/produce_response.go b/vendor/github.com/Shopify/sarama/produce_response.go index 043c40f8772..667e34c661b 100644 --- a/vendor/github.com/Shopify/sarama/produce_response.go +++ b/vendor/github.com/Shopify/sarama/produce_response.go @@ -152,7 +152,7 @@ func (r *ProduceResponse) requiredVersion() KafkaVersion { case 3: return V0_11_0_0 default: - return minVersion + return MinVersion } } diff --git a/vendor/github.com/Shopify/sarama/produce_set.go b/vendor/github.com/Shopify/sarama/produce_set.go index 3cbaeb5f90e..13be2b3c92b 100644 --- a/vendor/github.com/Shopify/sarama/produce_set.go +++ b/vendor/github.com/Shopify/sarama/produce_set.go @@ -59,10 +59,11 @@ func (ps *produceSet) add(msg *ProducerMessage) error { if set == nil { if ps.parent.conf.Version.IsAtLeast(V0_11_0_0) { batch := &RecordBatch{ - FirstTimestamp: timestamp, - Version: 2, - ProducerID: -1, /* No producer id */ - Codec: ps.parent.conf.Producer.Compression, + FirstTimestamp: timestamp, + Version: 2, + ProducerID: -1, /* No producer id */ + Codec: ps.parent.conf.Producer.Compression, + CompressionLevel: ps.parent.conf.Producer.CompressionLevel, } set = &partitionSet{recordsToSend: newDefaultRecords(batch)} size = recordBatchOverhead @@ -79,7 +80,7 @@ func (ps *produceSet) add(msg *ProducerMessage) error { rec := &Record{ Key: key, Value: val, - TimestampDelta: timestamp.Sub(set.recordsToSend.recordBatch.FirstTimestamp), + TimestampDelta: timestamp.Sub(set.recordsToSend.RecordBatch.FirstTimestamp), } size += len(key) + len(val) if len(msg.Headers) > 0 { @@ -89,14 +90,14 @@ func (ps *produceSet) add(msg *ProducerMessage) error { size += len(rec.Headers[i].Key) + len(rec.Headers[i].Value) + 2*binary.MaxVarintLen32 } } - set.recordsToSend.recordBatch.addRecord(rec) + set.recordsToSend.RecordBatch.addRecord(rec) } else { msgToSend := &Message{Codec: CompressionNone, Key: key, Value: val} if ps.parent.conf.Version.IsAtLeast(V0_10_0_0) { msgToSend.Timestamp = timestamp msgToSend.Version = 1 } - set.recordsToSend.msgSet.addMessage(msgToSend) + set.recordsToSend.MsgSet.addMessage(msgToSend) size = producerMessageOverhead + len(key) + len(val) } @@ -122,7 +123,14 @@ func (ps *produceSet) buildRequest() *ProduceRequest { for topic, partitionSet := range ps.msgs { for partition, set := range partitionSet { if 
req.Version >= 3 { - rb := set.recordsToSend.recordBatch + // If the API version we're hitting is 3 or greater, we need to calculate + // offsets for each record in the batch relative to FirstOffset. + // Additionally, we must set LastOffsetDelta to the value of the last offset + // in the batch. Since the OffsetDelta of the first record is 0, we know that the + // final record of any batch will have an offset of (# of records in batch) - 1. + // (See https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-Messagesets + // under the RecordBatch section for details.) + rb := set.recordsToSend.RecordBatch if len(rb.Records) > 0 { rb.LastOffsetDelta = int32(len(rb.Records) - 1) for i, record := range rb.Records { @@ -134,7 +142,7 @@ func (ps *produceSet) buildRequest() *ProduceRequest { continue } if ps.parent.conf.Producer.Compression == CompressionNone { - req.AddSet(topic, partition, set.recordsToSend.msgSet) + req.AddSet(topic, partition, set.recordsToSend.MsgSet) } else { // When compression is enabled, the entire set for each partition is compressed // and sent as the payload of a single fake "message" with the appropriate codec @@ -147,24 +155,25 @@ func (ps *produceSet) buildRequest() *ProduceRequest { // recompressing the message set. // (See https://cwiki.apache.org/confluence/display/KAFKA/KIP-31+-+Move+to+relative+offsets+in+compressed+message+sets // for details on relative offsets.) - for i, msg := range set.recordsToSend.msgSet.Messages { + for i, msg := range set.recordsToSend.MsgSet.Messages { msg.Offset = int64(i) } } - payload, err := encode(set.recordsToSend.msgSet, ps.parent.conf.MetricRegistry) + payload, err := encode(set.recordsToSend.MsgSet, ps.parent.conf.MetricRegistry) if err != nil { Logger.Println(err) // if this happens, it's basically our fault. 
panic(err) } compMsg := &Message{ - Codec: ps.parent.conf.Producer.Compression, - Key: nil, - Value: payload, - Set: set.recordsToSend.msgSet, // Provide the underlying message set for accurate metrics + Codec: ps.parent.conf.Producer.Compression, + CompressionLevel: ps.parent.conf.Producer.CompressionLevel, + Key: nil, + Value: payload, + Set: set.recordsToSend.MsgSet, // Provide the underlying message set for accurate metrics } if ps.parent.conf.Version.IsAtLeast(V0_10_0_0) { compMsg.Version = 1 - compMsg.Timestamp = set.recordsToSend.msgSet.Messages[0].Msg.Timestamp + compMsg.Timestamp = set.recordsToSend.MsgSet.Messages[0].Msg.Timestamp } req.AddMessage(topic, partition, compMsg) } diff --git a/vendor/github.com/Shopify/sarama/record_batch.go b/vendor/github.com/Shopify/sarama/record_batch.go index 321de485b0d..845318aa341 100644 --- a/vendor/github.com/Shopify/sarama/record_batch.go +++ b/vendor/github.com/Shopify/sarama/record_batch.go @@ -40,6 +40,7 @@ type RecordBatch struct { PartitionLeaderEpoch int32 Version int8 Codec CompressionCodec + CompressionLevel int Control bool LastOffsetDelta int32 FirstTimestamp time.Time @@ -219,7 +220,15 @@ func (b *RecordBatch) encodeRecords(pe packetEncoder) error { b.compressedRecords = raw case CompressionGZIP: var buf bytes.Buffer - writer := gzip.NewWriter(&buf) + var writer *gzip.Writer + if b.CompressionLevel != CompressionLevelDefault { + writer, err = gzip.NewWriterLevel(&buf, b.CompressionLevel) + if err != nil { + return err + } + } else { + writer = gzip.NewWriter(&buf) + } if _, err := writer.Write(raw); err != nil { return err } diff --git a/vendor/github.com/Shopify/sarama/records.go b/vendor/github.com/Shopify/sarama/records.go index 258dcbac880..301055bb070 100644 --- a/vendor/github.com/Shopify/sarama/records.go +++ b/vendor/github.com/Shopify/sarama/records.go @@ -14,30 +14,30 @@ const ( // Records implements a union type containing either a RecordBatch or a legacy MessageSet. type Records struct { recordsType int - msgSet *MessageSet - recordBatch *RecordBatch + MsgSet *MessageSet + RecordBatch *RecordBatch } func newLegacyRecords(msgSet *MessageSet) Records { - return Records{recordsType: legacyRecords, msgSet: msgSet} + return Records{recordsType: legacyRecords, MsgSet: msgSet} } func newDefaultRecords(batch *RecordBatch) Records { - return Records{recordsType: defaultRecords, recordBatch: batch} + return Records{recordsType: defaultRecords, RecordBatch: batch} } -// setTypeFromFields sets type of Records depending on which of msgSet or recordBatch is not nil. +// setTypeFromFields sets type of Records depending on which of MsgSet or RecordBatch is not nil. // The first return value indicates whether both fields are nil (and the type is not set). // If both fields are not nil, it returns an error. 
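Editorial aside: with MsgSet and RecordBatch now exported on the Records union (see the records.go hunks here), callers and tests can inspect fetched data directly. A hedged sketch; GetBlock, AddRecord and StringEncoder are assumed from the existing fetch-response API.

```go
package main

import (
	"fmt"

	"github.com/Shopify/sarama" // assumed upstream module path
)

// inspectFetch reports how many records came back for one topic/partition,
// covering both the legacy message-set and the v2 record-batch layouts.
func inspectFetch(resp *sarama.FetchResponse, topic string, partition int32) {
	block := resp.GetBlock(topic, partition)
	if block == nil || len(block.RecordsSet) == 0 {
		return
	}
	recs := block.RecordsSet[0]
	switch {
	case recs.RecordBatch != nil:
		fmt.Println("record batch records:", len(recs.RecordBatch.Records))
	case recs.MsgSet != nil:
		fmt.Println("legacy messages:", len(recs.MsgSet.Messages))
	}
}

func main() {
	resp := &sarama.FetchResponse{Version: 4}
	resp.AddRecord("events", 0, nil, sarama.StringEncoder("payload"), 0)
	inspectFetch(resp, "events", 0)
}
```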
func (r *Records) setTypeFromFields() (bool, error) { - if r.msgSet == nil && r.recordBatch == nil { + if r.MsgSet == nil && r.RecordBatch == nil { return true, nil } - if r.msgSet != nil && r.recordBatch != nil { - return false, fmt.Errorf("both msgSet and recordBatch are set, but record type is unknown") + if r.MsgSet != nil && r.RecordBatch != nil { + return false, fmt.Errorf("both MsgSet and RecordBatch are set, but record type is unknown") } r.recordsType = defaultRecords - if r.msgSet != nil { + if r.MsgSet != nil { r.recordsType = legacyRecords } return false, nil @@ -52,15 +52,15 @@ func (r *Records) encode(pe packetEncoder) error { switch r.recordsType { case legacyRecords: - if r.msgSet == nil { + if r.MsgSet == nil { return nil } - return r.msgSet.encode(pe) + return r.MsgSet.encode(pe) case defaultRecords: - if r.recordBatch == nil { + if r.RecordBatch == nil { return nil } - return r.recordBatch.encode(pe) + return r.RecordBatch.encode(pe) } return fmt.Errorf("unknown records type: %v", r.recordsType) @@ -89,11 +89,11 @@ func (r *Records) decode(pd packetDecoder) error { switch r.recordsType { case legacyRecords: - r.msgSet = &MessageSet{} - return r.msgSet.decode(pd) + r.MsgSet = &MessageSet{} + return r.MsgSet.decode(pd) case defaultRecords: - r.recordBatch = &RecordBatch{} - return r.recordBatch.decode(pd) + r.RecordBatch = &RecordBatch{} + return r.RecordBatch.decode(pd) } return fmt.Errorf("unknown records type: %v", r.recordsType) } @@ -107,15 +107,15 @@ func (r *Records) numRecords() (int, error) { switch r.recordsType { case legacyRecords: - if r.msgSet == nil { + if r.MsgSet == nil { return 0, nil } - return len(r.msgSet.Messages), nil + return len(r.MsgSet.Messages), nil case defaultRecords: - if r.recordBatch == nil { + if r.RecordBatch == nil { return 0, nil } - return len(r.recordBatch.Records), nil + return len(r.RecordBatch.Records), nil } return 0, fmt.Errorf("unknown records type: %v", r.recordsType) } @@ -131,15 +131,15 @@ func (r *Records) isPartial() (bool, error) { case unknownRecords: return false, nil case legacyRecords: - if r.msgSet == nil { + if r.MsgSet == nil { return false, nil } - return r.msgSet.PartialTrailingMessage, nil + return r.MsgSet.PartialTrailingMessage, nil case defaultRecords: - if r.recordBatch == nil { + if r.RecordBatch == nil { return false, nil } - return r.recordBatch.PartialTrailingRecord, nil + return r.RecordBatch.PartialTrailingRecord, nil } return false, fmt.Errorf("unknown records type: %v", r.recordsType) } @@ -155,10 +155,10 @@ func (r *Records) isControl() (bool, error) { case legacyRecords: return false, nil case defaultRecords: - if r.recordBatch == nil { + if r.RecordBatch == nil { return false, nil } - return r.recordBatch.Control, nil + return r.RecordBatch.Control, nil } return false, fmt.Errorf("unknown records type: %v", r.recordsType) } diff --git a/vendor/github.com/Shopify/sarama/request.go b/vendor/github.com/Shopify/sarama/request.go index 5f7cb76e95b..4d211a14f17 100644 --- a/vendor/github.com/Shopify/sarama/request.go +++ b/vendor/github.com/Shopify/sarama/request.go @@ -97,7 +97,7 @@ func allocateBody(key, version int16) protocolBody { case 9: return &OffsetFetchRequest{} case 10: - return &ConsumerMetadataRequest{} + return &FindCoordinatorRequest{} case 11: return &JoinGroupRequest{} case 12: @@ -118,6 +118,8 @@ func allocateBody(key, version int16) protocolBody { return &CreateTopicsRequest{} case 20: return &DeleteTopicsRequest{} + case 21: + return 
&DeleteRecordsRequest{} case 22: return &InitProducerIDRequest{} case 24: @@ -140,6 +142,8 @@ func allocateBody(key, version int16) protocolBody { return &AlterConfigsRequest{} case 37: return &CreatePartitionsRequest{} + case 42: + return &DeleteGroupsRequest{} } return nil } diff --git a/vendor/github.com/Shopify/sarama/utils.go b/vendor/github.com/Shopify/sarama/utils.go index 9d7b60f1614..702e2262701 100644 --- a/vendor/github.com/Shopify/sarama/utils.go +++ b/vendor/github.com/Shopify/sarama/utils.go @@ -139,21 +139,49 @@ func (v KafkaVersion) IsAtLeast(other KafkaVersion) bool { // Effective constants defining the supported kafka versions. var ( - V0_8_2_0 = newKafkaVersion(0, 8, 2, 0) - V0_8_2_1 = newKafkaVersion(0, 8, 2, 1) - V0_8_2_2 = newKafkaVersion(0, 8, 2, 2) - V0_9_0_0 = newKafkaVersion(0, 9, 0, 0) - V0_9_0_1 = newKafkaVersion(0, 9, 0, 1) - V0_10_0_0 = newKafkaVersion(0, 10, 0, 0) - V0_10_0_1 = newKafkaVersion(0, 10, 0, 1) - V0_10_1_0 = newKafkaVersion(0, 10, 1, 0) - V0_10_2_0 = newKafkaVersion(0, 10, 2, 0) - V0_11_0_0 = newKafkaVersion(0, 11, 0, 0) - V1_0_0_0 = newKafkaVersion(1, 0, 0, 0) - minVersion = V0_8_2_0 + V0_8_2_0 = newKafkaVersion(0, 8, 2, 0) + V0_8_2_1 = newKafkaVersion(0, 8, 2, 1) + V0_8_2_2 = newKafkaVersion(0, 8, 2, 2) + V0_9_0_0 = newKafkaVersion(0, 9, 0, 0) + V0_9_0_1 = newKafkaVersion(0, 9, 0, 1) + V0_10_0_0 = newKafkaVersion(0, 10, 0, 0) + V0_10_0_1 = newKafkaVersion(0, 10, 0, 1) + V0_10_1_0 = newKafkaVersion(0, 10, 1, 0) + V0_10_1_1 = newKafkaVersion(0, 10, 1, 1) + V0_10_2_0 = newKafkaVersion(0, 10, 2, 0) + V0_10_2_1 = newKafkaVersion(0, 10, 2, 1) + V0_11_0_0 = newKafkaVersion(0, 11, 0, 0) + V0_11_0_1 = newKafkaVersion(0, 11, 0, 1) + V0_11_0_2 = newKafkaVersion(0, 11, 0, 2) + V1_0_0_0 = newKafkaVersion(1, 0, 0, 0) + V1_1_0_0 = newKafkaVersion(1, 1, 0, 0) + + SupportedVersions = []KafkaVersion{ + V0_8_2_0, + V0_8_2_1, + V0_8_2_2, + V0_9_0_0, + V0_9_0_1, + V0_10_0_0, + V0_10_0_1, + V0_10_1_0, + V0_10_1_1, + V0_10_2_0, + V0_10_2_1, + V0_11_0_0, + V0_11_0_1, + V0_11_0_2, + V1_0_0_0, + V1_1_0_0, + } + MinVersion = V0_8_2_0 + MaxVersion = V1_1_0_0 ) func ParseKafkaVersion(s string) (KafkaVersion, error) { + if len(s) < 5 { + return MinVersion, fmt.Errorf("invalid version `%s`", s) + } var major, minor, veryMinor, patch uint var err error if s[0] == '0' { @@ -162,7 +190,7 @@ func ParseKafkaVersion(s string) (KafkaVersion, error) { err = scanKafkaVersion(s, `^\d+\.\d+\.\d+$`, "%d.%d.%d", [3]*uint{&major, &minor, &veryMinor}) } if err != nil { - return minVersion, err + return MinVersion, err } return newKafkaVersion(major, minor, veryMinor, patch), nil } diff --git a/vendor/github.com/elastic/beats/NOTICE.txt b/vendor/github.com/elastic/beats/NOTICE.txt index e8a9677f0ec..31d8d98bce3 100644 --- a/vendor/github.com/elastic/beats/NOTICE.txt +++ b/vendor/github.com/elastic/beats/NOTICE.txt @@ -116,11 +116,10 @@ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
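Editorial aside, looking back at the sarama utils.go hunk above: the version table is now exported (MinVersion, MaxVersion, SupportedVersions) and ParseKafkaVersion rejects strings shorter than five characters. A usage sketch; the import path is an assumption.

```go
package main

import (
	"fmt"

	"github.com/Shopify/sarama" // assumed upstream module path
)

func main() {
	v, err := sarama.ParseKafkaVersion("1.1.0")
	if err != nil {
		panic(err)
	}
	fmt.Println(v.IsAtLeast(sarama.V0_11_0_0)) // true

	// Too-short strings now fail fast instead of being mis-parsed.
	if _, err := sarama.ParseKafkaVersion("1.1"); err != nil {
		fmt.Println(err) // invalid version `1.1`
	}

	fmt.Println(len(sarama.SupportedVersions), sarama.MinVersion, sarama.MaxVersion)
}
```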
-------------------------------------------------------------------- -Dependency: github.com/boltdb/bolt -Version: v1.3.1 -Revision: 2f1ce7a837dcb8da3ec595b1dac9d0632f0f99e8 +Dependency: github.com/coreos/bbolt +Revision: af9db2027c98c61ecd8e17caa5bd265792b9b9a2 License type (autodetected): MIT -./vendor/github.com/boltdb/bolt/LICENSE: +./vendor/github.com/coreos/bbolt/LICENSE: -------------------------------------------------------------------- The MIT License (MIT) @@ -447,8 +446,8 @@ Apache License 2.0 -------------------------------------------------------------------- Dependency: github.com/elastic/go-ucfg -Version: v0.6.0 -Revision: 9c66f5c432b1d25bdb449a1e588d58b5d0cd7268 +Version: v0.6.1 +Revision: 581f7b1fe9d84f4c18ef0694d6e0eb944a925dae License type (autodetected): Apache-2.0 ./vendor/github.com/elastic/go-ucfg/LICENSE: -------------------------------------------------------------------- @@ -1083,6 +1082,49 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +-------------------------------------------------------------------- +Dependency: github.com/gogo/protobuf +Revision: 636bf0302bc95575d69441b25a2603156ffdddf1 +License type (autodetected): BSD-3-Clause +./metricbeat/vendor/github.com/gogo/protobuf/LICENSE: +-------------------------------------------------------------------- +Protocol Buffers for Go with Gadgets + +Copyright (c) 2013, The GoGo Authors. All rights reserved. +http://github.com/gogo/protobuf + +Go support for Protocol Buffers - Google's data interchange format + +Copyright 2010 The Go Authors. All rights reserved. +https://github.com/golang/protobuf + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + -------------------------------------------------------------------- Dependency: github.com/golang/protobuf Revision: 2bba0603135d7d7f5cb73b2125beeda19c09f4ef @@ -1380,6 +1422,16 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +-------------------------------------------------------------------- +Dependency: github.com/kubernetes/apimachinery +Version: kubernetes-1.11.1 +Revision: 103fd098999dc9c0c88536f5c9ad2e5da39373ae +License type (autodetected): Apache-2.0 +./metricbeat/vendor/github.com/kubernetes/apimachinery/LICENSE: +-------------------------------------------------------------------- +Apache License 2.0 + + -------------------------------------------------------------------- Dependency: github.com/lib/pq Revision: 2704adc878c21e1329f46f6e56a1c387d788ff94 @@ -1585,8 +1637,7 @@ THE SOFTWARE. -------------------------------------------------------------------- Dependency: github.com/OneOfOne/xxhash -Version: v1.2.2 -Revision: 6def279d2ce6c81a79dd1c1be580f03bb216fb8a +Revision: 2c166c65de755bdafa6ae2959c10ea9df6e8b3e5 License type (autodetected): Apache-2.0 ./vendor/github.com/OneOfOne/xxhash/LICENSE: -------------------------------------------------------------------- @@ -2030,12 +2081,12 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------- Dependency: github.com/Shopify/sarama -Version: v1.16.0/enh/offset-replica-id -Revision: 32b4ad5c9537ed14e471779b76713ff65420db39 +Version: v1.17.0/enh/offset-replica-id +Revision: d1575e4abe04acbbe8ac766320585cdf271dd189 License type (autodetected): MIT ./vendor/github.com/Shopify/sarama/LICENSE: -------------------------------------------------------------------- -Copyright (c) 2013 Evan Huus +Copyright (c) 2013 Shopify Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the @@ -2693,6 +2744,41 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +-------------------------------------------------------------------- +Dependency: gopkg.in/inf.v0 +Revision: d2d2541c53f18d2a059457998ce2876cc8e67cbf +License type (autodetected): BSD-3-Clause +./metricbeat/vendor/gopkg.in/inf.v0/LICENSE: +-------------------------------------------------------------------- +Copyright (c) 2012 Péter Surányi. Portions Copyright (c) 2009 The Go +Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + -------------------------------------------------------------------- Dependency: gopkg.in/mgo.v2 Revision: 3f83fa5005286a7fe593b055f0d7771a7dce4655 diff --git a/vendor/github.com/elastic/beats/dev-tools/mage/fields.go b/vendor/github.com/elastic/beats/dev-tools/mage/fields.go new file mode 100644 index 00000000000..8f658483ffb --- /dev/null +++ b/vendor/github.com/elastic/beats/dev-tools/mage/fields.go @@ -0,0 +1,48 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "path/filepath" + + "github.com/magefile/mage/sh" +) + +// GenerateFieldsYAML generates a fields.yml file for a Beat. This will include +// the common fields specified by libbeat, the common fields for the Beat, +// and any additional fields.yml files you specify. +// +// fieldsFiles specifies additional directories to search recursively for files +// named fields.yml. The contents of each fields.yml will be included in the +// generated file. +func GenerateFieldsYAML(fieldsFiles ...string) error { + const globalFieldsCmdPath = "libbeat/scripts/cmd/global_fields/main.go" + + beatsDir, err := ElasticBeatsDir() + if err != nil { + return err + } + + globalFieldsCmd := sh.RunCmd("go", "run", + filepath.Join(beatsDir, globalFieldsCmdPath), + "-es_beats_path", beatsDir, + "-beat_path", CWD(), + ) + + return globalFieldsCmd(fieldsFiles...) +} diff --git a/vendor/github.com/elastic/beats/libbeat/cmd/setup.go b/vendor/github.com/elastic/beats/libbeat/cmd/setup.go index 67343507cb0..d8745cf765f 100644 --- a/vendor/github.com/elastic/beats/libbeat/cmd/setup.go +++ b/vendor/github.com/elastic/beats/libbeat/cmd/setup.go @@ -34,7 +34,7 @@ func genSetupCmd(name, idxPrefix, version string, beatCreator beat.Creator) *cob Long: `This command does initial setup of the environment: * Index mapping template in Elasticsearch to ensure fields are mapped. - * Kibana dashboards and index patterns (where available). + * Kibana dashboards (where available). 
* ML jobs (where available). * Ingest pipelines (where available). `, diff --git a/vendor/github.com/elastic/beats/libbeat/common/field.go b/vendor/github.com/elastic/beats/libbeat/common/field.go index 115b05636e5..d69a75e90bf 100644 --- a/vendor/github.com/elastic/beats/libbeat/common/field.go +++ b/vendor/github.com/elastic/beats/libbeat/common/field.go @@ -52,6 +52,7 @@ type Field struct { DocValues *bool `config:"doc_values"` CopyTo string `config:"copy_to"` IgnoreAbove int `config:"ignore_above"` + AliasPath string `config:"path"` // Kibana specific Analyzed *bool `config:"analyzed"` diff --git a/vendor/github.com/elastic/beats/libbeat/fields.yml b/vendor/github.com/elastic/beats/libbeat/fields.yml new file mode 100644 index 00000000000..9d630416b84 --- /dev/null +++ b/vendor/github.com/elastic/beats/libbeat/fields.yml @@ -0,0 +1,216 @@ + +- key: beat + title: Beat + description: > + Contains common beat fields available in all event types. + fields: + + - name: beat.name + description: > + The name of the Beat sending the log messages. If the Beat name is + set in the configuration file, then that value is used. If it is not + set, the hostname is used. To set the Beat name, use the `name` + option in the configuration file. + - name: beat.hostname + description: > + The hostname as returned by the operating system on which the Beat is + running. + - name: beat.timezone + description: > + The timezone as returned by the operating system on which the Beat is + running. + - name: beat.version + description: > + The version of the beat that generated this event. + + - name: "@timestamp" + type: date + required: true + format: date + example: August 26th 2016, 12:35:53.332 + description: > + The timestamp when the event log record was generated. + + - name: tags + description: > + Arbitrary tags that can be set per Beat and per transaction + type. + + - name: fields + type: object + object_type: keyword + description: > + Contains user configurable fields. + + - name: error + type: group + description: > + Error fields containing additional info in case of errors. + fields: + - name: message + type: text + description: > + Error message. + - name: code + type: long + description: > + Error code. + - name: type + type: keyword + description: > + Error type. +- key: cloud + title: Cloud provider metadata + description: > + Metadata from cloud providers added by the add_cloud_metadata processor. + fields: + + - name: meta.cloud.provider + example: ec2 + description: > + Name of the cloud provider. Possible values are ec2, gce, or digitalocean. + + - name: meta.cloud.instance_id + description: > + Instance ID of the host machine. + + - name: meta.cloud.instance_name + description: > + Instance name of the host machine. + + - name: meta.cloud.machine_type + example: t2.medium + description: > + Machine type of the host machine. + + - name: meta.cloud.availability_zone + example: us-east-1c + description: > + Availability zone in which this host is running. + + - name: meta.cloud.project_id + example: project-x + description: > + Name of the project in Google Cloud. + + - name: meta.cloud.region + description: > + Region in which this host is running. +- key: docker + title: Docker + description: > + Docker stats collected from Docker. + short_config: false + anchor: docker-processor + fields: + - name: docker + type: group + fields: + - name: container.id + type: keyword + description: > + Unique container id. 
+ - name: container.image + type: keyword + description: > + Name of the image the container was built on. + - name: container.name + type: keyword + description: > + Container name. + - name: container.labels + type: object + object_type: keyword + description: > + Image labels. +- key: host + title: Host + description: > + Info collected for the host machine. + anchor: host-processor + fields: + - name: host + type: group + fields: + - name: name + type: keyword + description: > + Hostname. + - name: id + type: keyword + description: > + Unique host id. + - name: architecture + type: keyword + description: > + Host architecture (e.g. x86_64, arm, ppc, mips). + - name: os.platform + type: keyword + description: > + OS platform (e.g. centos, ubuntu, windows). + - name: os.version + type: keyword + description: > + OS version. + - name: os.family + type: keyword + description: > + OS family (e.g. redhat, debian, freebsd, windows). + - name: ip + type: ip + description: > + List of IP-addresses. + - name: mac + type: keyword + description: > + List of hardware-addresses, usually MAC-addresses. + +- key: kubernetes + title: Kubernetes + description: > + Kubernetes metadata added by the kubernetes processor + short_config: false + anchor: kubernetes-processor + fields: + - name: kubernetes + type: group + fields: + - name: pod.name + type: keyword + description: > + Kubernetes pod name + + - name: pod.uid + type: keyword + description: > + Kubernetes Pod UID + + - name: namespace + type: keyword + description: > + Kubernetes namespace + + - name: node.name + type: keyword + description: > + Kubernetes node name + + - name: labels + type: object + description: > + Kubernetes labels map + + - name: annotations + type: object + description: > + Kubernetes annotations map + + - name: container.name + type: keyword + description: > + Kubernetes container name + + - name: container.image + type: keyword + description: > + Kubernetes container image diff --git a/vendor/github.com/elastic/beats/libbeat/generator/fields/fields.go b/vendor/github.com/elastic/beats/libbeat/generator/fields/fields.go index 68365dced93..02ec5374948 100644 --- a/vendor/github.com/elastic/beats/libbeat/generator/fields/fields.go +++ b/vendor/github.com/elastic/beats/libbeat/generator/fields/fields.go @@ -20,12 +20,13 @@ package fields import ( "bufio" "bytes" - "fmt" "io/ioutil" "os" "path" "path/filepath" "strings" + + "github.com/pkg/errors" ) var ( @@ -152,7 +153,7 @@ func createIfNotExists(inPath, outPath string) error { if os.IsNotExist(err) { err := copyFileWithFlag(inPath, outPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC) if err != nil { - fmt.Println("Cannot find _meta/fields.yml") + return err } return nil } @@ -162,12 +163,17 @@ func createIfNotExists(inPath, outPath string) error { func copyFileWithFlag(in, out string, flag int) error { input, err := ioutil.ReadFile(in) if err != nil { - return err + return errors.Wrap(err, "failed to read source in copy") + } + + if err := os.MkdirAll(filepath.Dir(out), 0755); err != nil { + return errors.Wrapf(err, "failed to create destination dir for copy "+ + "at %v", filepath.Dir(out)) } output, err := os.OpenFile(out, flag, 0644) if err != nil { - return err + return errors.Wrap(err, "failed to open destination file for copy") } defer output.Close() diff --git a/vendor/github.com/elastic/beats/libbeat/metric/system/cpu/cpu.go b/vendor/github.com/elastic/beats/libbeat/metric/system/cpu/cpu.go index a78b93daadb..b5cb6981fb8 
100644 --- a/vendor/github.com/elastic/beats/libbeat/metric/system/cpu/cpu.go +++ b/vendor/github.com/elastic/beats/libbeat/metric/system/cpu/cpu.go @@ -116,7 +116,9 @@ func cpuPercentages(s0, s1 *sigar.Cpu, numCPU int) Percentages { } calculateTotalPct := func() float64 { - return common.Round(float64(numCPU)-calculatePct(s0.Idle, s1.Idle), common.DefaultDecimalPlacesCount) + // IOWait time is excluded from the total as per #7627. + idle := calculatePct(s0.Idle, s1.Idle) + calculatePct(s0.Wait, s1.Wait) + return common.Round(float64(numCPU)-idle, common.DefaultDecimalPlacesCount) } return Percentages{ diff --git a/vendor/github.com/elastic/beats/libbeat/outputs/kafka/config.go b/vendor/github.com/elastic/beats/libbeat/outputs/kafka/config.go index 2e9a0152d1f..956a8bd8cb4 100644 --- a/vendor/github.com/elastic/beats/libbeat/outputs/kafka/config.go +++ b/vendor/github.com/elastic/beats/libbeat/outputs/kafka/config.go @@ -36,25 +36,26 @@ import ( ) type kafkaConfig struct { - Hosts []string `config:"hosts" validate:"required"` - TLS *tlscommon.Config `config:"ssl"` - Timeout time.Duration `config:"timeout" validate:"min=1"` - Metadata metaConfig `config:"metadata"` - Key *fmtstr.EventFormatString `config:"key"` - Partition map[string]*common.Config `config:"partition"` - KeepAlive time.Duration `config:"keep_alive" validate:"min=0"` - MaxMessageBytes *int `config:"max_message_bytes" validate:"min=1"` - RequiredACKs *int `config:"required_acks" validate:"min=-1"` - BrokerTimeout time.Duration `config:"broker_timeout" validate:"min=1"` - Compression string `config:"compression"` - Version string `config:"version"` - BulkMaxSize int `config:"bulk_max_size"` - MaxRetries int `config:"max_retries" validate:"min=-1,nonzero"` - ClientID string `config:"client_id"` - ChanBufferSize int `config:"channel_buffer_size" validate:"min=1"` - Username string `config:"username"` - Password string `config:"password"` - Codec codec.Config `config:"codec"` + Hosts []string `config:"hosts" validate:"required"` + TLS *tlscommon.Config `config:"ssl"` + Timeout time.Duration `config:"timeout" validate:"min=1"` + Metadata metaConfig `config:"metadata"` + Key *fmtstr.EventFormatString `config:"key"` + Partition map[string]*common.Config `config:"partition"` + KeepAlive time.Duration `config:"keep_alive" validate:"min=0"` + MaxMessageBytes *int `config:"max_message_bytes" validate:"min=1"` + RequiredACKs *int `config:"required_acks" validate:"min=-1"` + BrokerTimeout time.Duration `config:"broker_timeout" validate:"min=1"` + Compression string `config:"compression"` + CompressionLevel int `config:"compression_level"` + Version string `config:"version"` + BulkMaxSize int `config:"bulk_max_size"` + MaxRetries int `config:"max_retries" validate:"min=-1,nonzero"` + ClientID string `config:"client_id"` + ChanBufferSize int `config:"channel_buffer_size" validate:"min=1"` + Username string `config:"username"` + Password string `config:"password"` + Codec codec.Config `config:"codec"` } type metaConfig struct { @@ -89,17 +90,18 @@ func defaultConfig() kafkaConfig { }, RefreshFreq: 10 * time.Minute, }, - KeepAlive: 0, - MaxMessageBytes: nil, // use library default - RequiredACKs: nil, // use library default - BrokerTimeout: 10 * time.Second, - Compression: "gzip", - Version: "1.0.0", - MaxRetries: 3, - ClientID: "beats", - ChanBufferSize: 256, - Username: "", - Password: "", + KeepAlive: 0, + MaxMessageBytes: nil, // use library default + RequiredACKs: nil, // use library 
default + BrokerTimeout: 10 * time.Second, + Compression: "gzip", + CompressionLevel: 4, + Version: "1.0.0", + MaxRetries: 3, + ClientID: "beats", + ChanBufferSize: 256, + Username: "", + Password: "", } } @@ -120,6 +122,13 @@ func (c *kafkaConfig) Validate() error { return fmt.Errorf("password must be set when username is configured") } + if c.Compression == "gzip" { + lvl := c.CompressionLevel + if lvl != sarama.CompressionLevelDefault && !(0 <= lvl && lvl <= 9) { + return fmt.Errorf("compression_level must be between 0 and 9") + } + } + return nil } @@ -138,6 +147,7 @@ func newSaramaConfig(config *kafkaConfig) (*sarama.Config, error) { k.Net.WriteTimeout = timeout k.Net.KeepAlive = config.KeepAlive k.Producer.Timeout = config.BrokerTimeout + k.Producer.CompressionLevel = config.CompressionLevel tls, err := outputs.LoadTLSConfig(config.TLS) if err != nil { diff --git a/vendor/github.com/elastic/beats/libbeat/outputs/kafka/version.go b/vendor/github.com/elastic/beats/libbeat/outputs/kafka/version.go index d171520a10c..884e67616a2 100644 --- a/vendor/github.com/elastic/beats/libbeat/outputs/kafka/version.go +++ b/vendor/github.com/elastic/beats/libbeat/outputs/kafka/version.go @@ -27,7 +27,8 @@ var ( v0_11_0_1 = parseKafkaVersion("0.11.0.1") v0_11_0_2 = parseKafkaVersion("0.11.0.2") v1_0_1 = parseKafkaVersion("1.0.1") - v1_1_0 = parseKafkaVersion("1.1.0") + v1_0_2 = parseKafkaVersion("1.0.2") + v1_1_1 = parseKafkaVersion("1.1.1") kafkaVersions = map[string]sarama.KafkaVersion{ "": sarama.V1_0_0_0, @@ -61,10 +62,12 @@ var ( "1.0.0": sarama.V1_0_0_0, "1.0.1": v1_0_1, - "1.0": v1_0_1, - "1.1.0": v1_1_0, - "1.1": v1_1_0, - "1": v1_1_0, + "1.0.2": v1_0_2, + "1.0": v1_0_2, + "1.1.0": sarama.V1_1_0_0, + "1.1.1": v1_1_1, + "1.1": v1_1_1, + "1": v1_1_1, } ) diff --git a/vendor/github.com/elastic/beats/libbeat/plugin/cli.go b/vendor/github.com/elastic/beats/libbeat/plugin/cli.go index 40a356211ad..6171bef7e6b 100644 --- a/vendor/github.com/elastic/beats/libbeat/plugin/cli.go +++ b/vendor/github.com/elastic/beats/libbeat/plugin/cli.go @@ -24,6 +24,7 @@ import ( "flag" "strings" + "github.com/elastic/beats/libbeat/common/cfgwarn" "github.com/elastic/beats/libbeat/logp" ) @@ -54,7 +55,7 @@ func init() { func Initialize() error { if len(plugins.paths) > 0 { - logp.Warn("EXPERIMENTAL: loadable plugin support is experimental") + cfgwarn.Experimental("loadable plugin support is experimental") } for _, path := range plugins.paths { diff --git a/vendor/github.com/elastic/beats/libbeat/processors/actions/rename.go b/vendor/github.com/elastic/beats/libbeat/processors/actions/rename.go index 47e367b9b5f..bd39f93ab74 100644 --- a/vendor/github.com/elastic/beats/libbeat/processors/actions/rename.go +++ b/vendor/github.com/elastic/beats/libbeat/processors/actions/rename.go @@ -24,7 +24,6 @@ import ( "github.com/elastic/beats/libbeat/beat" "github.com/elastic/beats/libbeat/common" - "github.com/elastic/beats/libbeat/common/cfgwarn" "github.com/elastic/beats/libbeat/logp" "github.com/elastic/beats/libbeat/processors" ) @@ -51,8 +50,6 @@ func init() { } func newRenameFields(c *common.Config) (processors.Processor, error) { - - cfgwarn.Beta("Beta rename processor is used.") config := renameFieldsConfig{ IgnoreMissing: false, FailOnError: true, diff --git a/vendor/github.com/elastic/beats/libbeat/scripts/cmd/global_fields/main.go 
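The Kafka output change above introduces a `compression_level` setting (config tag `compression_level`, default 4) that is validated only when `compression: gzip` is configured, accepting either the sarama default sentinel or a value between 0 and 9, and is then passed through to the sarama producer. A minimal sketch of how this could be set in a Beat's output section follows; the surrounding `output.kafka` and `hosts` keys follow the usual Beats layout and are assumptions here, while the two compression options come from this diff.

```yaml
# Hypothetical Kafka output configuration exercising the new setting.
output.kafka:
  hosts: ["localhost:9092"]
  compression: gzip        # compression_level is only validated for gzip
  compression_level: 4     # matches the new default; must be between 0 and 9
```

With the Validate change above, an out-of-range level is rejected when the configuration is loaded instead of surfacing later from the producer.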
b/vendor/github.com/elastic/beats/libbeat/scripts/cmd/global_fields/main.go new file mode 100644 index 00000000000..74fc94a603d --- /dev/null +++ b/vendor/github.com/elastic/beats/libbeat/scripts/cmd/global_fields/main.go @@ -0,0 +1,78 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package main + +import ( + "flag" + "fmt" + "os" + "path/filepath" + + "github.com/elastic/beats/libbeat/generator/fields" +) + +func main() { + esBeatsPath := flag.String("es_beats_path", "..", "Path to elastic/beats") + beatPath := flag.String("beat_path", ".", "Path to your Beat") + flag.Parse() + + beatFieldsPaths := flag.Args() + name := filepath.Base(*beatPath) + + if *beatPath == "" { + fmt.Fprintf(os.Stderr, "beat_path cannot be empty") + os.Exit(1) + } + + err := os.MkdirAll(filepath.Join(*beatPath, "_meta"), 0755) + if err != nil { + fmt.Fprintf(os.Stderr, "Cannot create _meta dir for %s: %+v\n", name, err) + os.Exit(1) + } + + if len(beatFieldsPaths) == 0 { + fmt.Println("No field files to collect") + err = fields.AppendFromLibbeat(*esBeatsPath, *beatPath) + if err != nil { + fmt.Fprintf(os.Stderr, "Cannot generate global fields.yml for %s: %+v\n", name, err) + os.Exit(2) + } + return + } + + var fieldsFiles []*fields.YmlFile + for _, fieldsFilePath := range beatFieldsPaths { + pathToModules := filepath.Join(*beatPath, fieldsFilePath) + + fieldsFile, err := fields.CollectModuleFiles(pathToModules) + if err != nil { + fmt.Fprintf(os.Stderr, "Cannot collect fields.yml files: %+v\n", err) + os.Exit(2) + } + + fieldsFiles = append(fieldsFiles, fieldsFile...) 
+ } + + err = fields.Generate(*esBeatsPath, *beatPath, fieldsFiles) + if err != nil { + fmt.Fprintf(os.Stderr, "Cannot generate global fields.yml file for %s: %+v\n", name, err) + os.Exit(3) + } + + fmt.Printf("Generated fields.yml for %s\n", name) +} diff --git a/vendor/github.com/elastic/beats/libbeat/template/processor.go b/vendor/github.com/elastic/beats/libbeat/template/processor.go index d87c2348d66..32399b5fbb8 100644 --- a/vendor/github.com/elastic/beats/libbeat/template/processor.go +++ b/vendor/github.com/elastic/beats/libbeat/template/processor.go @@ -62,6 +62,8 @@ func (p *Processor) Process(fields common.Fields, path string, output common.Map mapping = p.object(&field) case "array": mapping = p.array(&field) + case "alias": + mapping = p.alias(&field) case "group": var newPath string if path == "" { @@ -242,6 +244,13 @@ func (p *Processor) array(f *common.Field) common.MapStr { return properties } +func (p *Processor) alias(f *common.Field) common.MapStr { + properties := getDefaultProperties(f) + properties["type"] = "alias" + properties["path"] = f.AliasPath + return properties +} + func (p *Processor) object(f *common.Field) common.MapStr { dynProperties := getDefaultProperties(f) diff --git a/vendor/github.com/elastic/go-ucfg/CHANGELOG.md b/vendor/github.com/elastic/go-ucfg/CHANGELOG.md index dd64f2609c7..53a0e6169d6 100644 --- a/vendor/github.com/elastic/go-ucfg/CHANGELOG.md +++ b/vendor/github.com/elastic/go-ucfg/CHANGELOG.md @@ -14,6 +14,11 @@ This project adheres to [Semantic Versioning](http://semver.org/). ### Fixed +## [0.6.1] + +### Fixed +- Ignore flag keys with missing values. #111 + ## [0.6.0] ### Added @@ -197,7 +202,8 @@ This project adheres to [Semantic Versioning](http://semver.org/). - Introduced CHANGELOG.md for documenting changes to ucfg. -[Unreleased]: https://github.com/elastic/go-ucfg/compare/v0.6.0...HEAD +[Unreleased]: https://github.com/elastic/go-ucfg/compare/v0.6.1...HEAD +[0.6.1]: https://github.com/elastic/go-ucfg/compare/v0.6.0...v0.6.1 [0.6.0]: https://github.com/elastic/go-ucfg/compare/v0.5.1...v0.6.0 [0.5.1]: https://github.com/elastic/go-ucfg/compare/v0.5.0...v0.5.1 [0.5.0]: https://github.com/elastic/go-ucfg/compare/v0.4.6...v0.5.0 diff --git a/vendor/github.com/elastic/go-ucfg/README.md b/vendor/github.com/elastic/go-ucfg/README.md index 446a179703b..515d38d5f9c 100644 --- a/vendor/github.com/elastic/go-ucfg/README.md +++ b/vendor/github.com/elastic/go-ucfg/README.md @@ -17,26 +17,25 @@ The full API Documentation can be found [here](https://godoc.org/github.com/elas A few examples on how ucfg can be used. All examples below assume, that the following packages are imported: -``` +```golang import ( "github.com/elastic/go-ucfg" "github.com/elastic/go-ucfg/yaml" ) ``` - -### Dot notations +### Dot notations ufcg allows you to load yaml configuration files using dots instead of indentation. For example instead of having: -``` +```yaml config: user: name ``` with ucfg you can write: -``` +```yaml config.user: name ``` @@ -44,7 +43,7 @@ This makes configurations easier and simpler. 
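The alias support added above (the `AliasPath` field with the `path` config tag in libbeat/common/field.go, and the new `alias` case in libbeat/template/processor.go) means a fields.yml entry can now declare an Elasticsearch alias field. A minimal sketch of such an entry, assuming the usual fields.yml layout; the key and field names are illustrative only:

```yaml
- key: example
  title: Example
  fields:
    - name: hostname        # illustrative field name
      type: alias           # handled by the new alias() branch in the template processor
      path: beat.hostname   # copied into the "path" property of the generated mapping
```

For this entry the template processor emits a property of the form `{"type": "alias", "path": "beat.hostname"}`, mirroring the `alias()` helper added above.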
To load such a config file in Golang, use the following command: -``` +```golang config, err := yaml.NewConfigWithFile(path, ucfg.PathSep(".")) ``` @@ -55,7 +54,7 @@ config, err := yaml.NewConfigWithFile(path, ucfg.PathSep(".")) ucfg allows to automatically validate fields and set defaults for fields in case they are not defined. -``` +```golang // Defines struct to read config from type ExampleConfig struct { Counter string `config:"counter" validate:"min=0, max=9"` @@ -90,4 +89,4 @@ The above uses `Counter` as the config variable. ucfg assures that the value is ucfg has the following requirements: -* Golang 1.5+ +* Golang 1.7+ diff --git a/vendor/github.com/elastic/go-ucfg/flag/value.go b/vendor/github.com/elastic/go-ucfg/flag/value.go index 782cfc06f47..f75816badb7 100644 --- a/vendor/github.com/elastic/go-ucfg/flag/value.go +++ b/vendor/github.com/elastic/go-ucfg/flag/value.go @@ -62,6 +62,10 @@ func NewFlagKeyValue(cfg *ucfg.Config, autoBool bool, opts ...ucfg.Option) *Flag val = true } else { key = args[0] + if args[1] == "" { + return nil, nil, nil + } + val, err = parse.Value(args[1]) if err != nil { return nil, err, err diff --git a/vendor/github.com/klauspost/compress/flate/gen.go b/vendor/github.com/klauspost/compress/flate/gen.go new file mode 100644 index 00000000000..154c89a488e --- /dev/null +++ b/vendor/github.com/klauspost/compress/flate/gen.go @@ -0,0 +1,265 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// This program generates fixedhuff.go +// Invoke as +// +// go run gen.go -output fixedhuff.go + +package main + +import ( + "bytes" + "flag" + "fmt" + "go/format" + "io/ioutil" + "log" +) + +var filename = flag.String("output", "fixedhuff.go", "output file name") + +const maxCodeLen = 16 + +// Note: the definition of the huffmanDecoder struct is copied from +// inflate.go, as it is private to the implementation. + +// chunk & 15 is number of bits +// chunk >> 4 is value, including table link + +const ( + huffmanChunkBits = 9 + huffmanNumChunks = 1 << huffmanChunkBits + huffmanCountMask = 15 + huffmanValueShift = 4 +) + +type huffmanDecoder struct { + min int // the minimum code length + chunks [huffmanNumChunks]uint32 // chunks as described above + links [][]uint32 // overflow links + linkMask uint32 // mask the width of the link table +} + +// Initialize Huffman decoding tables from array of code lengths. +// Following this function, h is guaranteed to be initialized into a complete +// tree (i.e., neither over-subscribed nor under-subscribed). The exception is a +// degenerate case where the tree has only a single symbol with length 1. Empty +// trees are permitted. +func (h *huffmanDecoder) init(bits []int) bool { + // Sanity enables additional runtime tests during Huffman + // table construction. It's intended to be used during + // development to supplement the currently ad-hoc unit tests. + const sanity = false + + if h.min != 0 { + *h = huffmanDecoder{} + } + + // Count number of codes of each length, + // compute min and max length. + var count [maxCodeLen]int + var min, max int + for _, n := range bits { + if n == 0 { + continue + } + if min == 0 || n < min { + min = n + } + if n > max { + max = n + } + count[n]++ + } + + // Empty tree. The decompressor.huffSym function will fail later if the tree + // is used. 
Technically, an empty tree is only valid for the HDIST tree and + // not the HCLEN and HLIT tree. However, a stream with an empty HCLEN tree + // is guaranteed to fail since it will attempt to use the tree to decode the + // codes for the HLIT and HDIST trees. Similarly, an empty HLIT tree is + // guaranteed to fail later since the compressed data section must be + // composed of at least one symbol (the end-of-block marker). + if max == 0 { + return true + } + + code := 0 + var nextcode [maxCodeLen]int + for i := min; i <= max; i++ { + code <<= 1 + nextcode[i] = code + code += count[i] + } + + // Check that the coding is complete (i.e., that we've + // assigned all 2-to-the-max possible bit sequences). + // Exception: To be compatible with zlib, we also need to + // accept degenerate single-code codings. See also + // TestDegenerateHuffmanCoding. + if code != 1< huffmanChunkBits { + numLinks := 1 << (uint(max) - huffmanChunkBits) + h.linkMask = uint32(numLinks - 1) + + // create link tables + link := nextcode[huffmanChunkBits+1] >> 1 + h.links = make([][]uint32, huffmanNumChunks-link) + for j := uint(link); j < huffmanNumChunks; j++ { + reverse := int(reverseByte[j>>8]) | int(reverseByte[j&0xff])<<8 + reverse >>= uint(16 - huffmanChunkBits) + off := j - uint(link) + if sanity && h.chunks[reverse] != 0 { + panic("impossible: overwriting existing chunk") + } + h.chunks[reverse] = uint32(off<>8]) | int(reverseByte[code&0xff])<<8 + reverse >>= uint(16 - n) + if n <= huffmanChunkBits { + for off := reverse; off < len(h.chunks); off += 1 << uint(n) { + // We should never need to overwrite + // an existing chunk. Also, 0 is + // never a valid chunk, because the + // lower 4 "count" bits should be + // between 1 and 15. + if sanity && h.chunks[off] != 0 { + panic("impossible: overwriting existing chunk") + } + h.chunks[off] = chunk + } + } else { + j := reverse & (huffmanNumChunks - 1) + if sanity && h.chunks[j]&huffmanCountMask != huffmanChunkBits+1 { + // Longer codes should have been + // associated with a link table above. + panic("impossible: not an indirect chunk") + } + value := h.chunks[j] >> huffmanValueShift + linktab := h.links[value] + reverse >>= huffmanChunkBits + for off := reverse; off < len(linktab); off += 1 << uint(n-huffmanChunkBits) { + if sanity && linktab[off] != 0 { + panic("impossible: overwriting existing chunk") + } + linktab[off] = chunk + } + } + } + + if sanity { + // Above we've sanity checked that we never overwrote + // an existing entry. Here we additionally check that + // we filled the tables completely. + for i, chunk := range h.chunks { + if chunk == 0 { + // As an exception, in the degenerate + // single-code case, we allow odd + // chunks to be missing. + if code == 1 && i%2 == 1 { + continue + } + panic("impossible: missing chunk") + } + } + for _, linktab := range h.links { + for _, chunk := range linktab { + if chunk == 0 { + panic("impossible: missing chunk") + } + } + } + } + + return true +} + +func main() { + flag.Parse() + + var h huffmanDecoder + var bits [288]int + initReverseByte() + for i := 0; i < 144; i++ { + bits[i] = 8 + } + for i := 144; i < 256; i++ { + bits[i] = 9 + } + for i := 256; i < 280; i++ { + bits[i] = 7 + } + for i := 280; i < 288; i++ { + bits[i] = 8 + } + h.init(bits[:]) + if h.links != nil { + log.Fatal("Unexpected links table in fixed Huffman decoder") + } + + var buf bytes.Buffer + + fmt.Fprintf(&buf, `// Copyright 2013 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file.`+"\n\n") + + fmt.Fprintln(&buf, "package flate") + fmt.Fprintln(&buf) + fmt.Fprintln(&buf, "// autogenerated by go run gen.go -output fixedhuff.go, DO NOT EDIT") + fmt.Fprintln(&buf) + fmt.Fprintln(&buf, "var fixedHuffmanDecoder = huffmanDecoder{") + fmt.Fprintf(&buf, "\t%d,\n", h.min) + fmt.Fprintln(&buf, "\t[huffmanNumChunks]uint32{") + for i := 0; i < huffmanNumChunks; i++ { + if i&7 == 0 { + fmt.Fprintf(&buf, "\t\t") + } else { + fmt.Fprintf(&buf, " ") + } + fmt.Fprintf(&buf, "0x%04x,", h.chunks[i]) + if i&7 == 7 { + fmt.Fprintln(&buf) + } + } + fmt.Fprintln(&buf, "\t},") + fmt.Fprintln(&buf, "\tnil, 0,") + fmt.Fprintln(&buf, "}") + + data, err := format.Source(buf.Bytes()) + if err != nil { + log.Fatal(err) + } + err = ioutil.WriteFile(*filename, data, 0644) + if err != nil { + log.Fatal(err) + } +} + +var reverseByte [256]byte + +func initReverseByte() { + for x := 0; x < 256; x++ { + var result byte + for i := uint(0); i < 8; i++ { + result |= byte(((x >> i) & 1) << (7 - i)) + } + reverseByte[x] = result + } +} diff --git a/vendor/github.com/klauspost/cpuid/private-gen.go b/vendor/github.com/klauspost/cpuid/private-gen.go new file mode 100644 index 00000000000..437333d2922 --- /dev/null +++ b/vendor/github.com/klauspost/cpuid/private-gen.go @@ -0,0 +1,476 @@ +// +build ignore + +package main + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "io" + "io/ioutil" + "log" + "os" + "reflect" + "strings" + "unicode" + "unicode/utf8" +) + +var inFiles = []string{"cpuid.go", "cpuid_test.go"} +var copyFiles = []string{"cpuid_amd64.s", "cpuid_386.s", "detect_ref.go", "detect_intel.go"} +var fileSet = token.NewFileSet() +var reWrites = []rewrite{ + initRewrite("CPUInfo -> cpuInfo"), + initRewrite("Vendor -> vendor"), + initRewrite("Flags -> flags"), + initRewrite("Detect -> detect"), + initRewrite("CPU -> cpu"), +} +var excludeNames = map[string]bool{"string": true, "join": true, "trim": true, + // cpuid_test.go + "t": true, "println": true, "logf": true, "log": true, "fatalf": true, "fatal": true, +} + +var excludePrefixes = []string{"test", "benchmark"} + +func main() { + Package := "private" + parserMode := parser.ParseComments + exported := make(map[string]rewrite) + for _, file := range inFiles { + in, err := os.Open(file) + if err != nil { + log.Fatalf("opening input", err) + } + + src, err := ioutil.ReadAll(in) + if err != nil { + log.Fatalf("reading input", err) + } + + astfile, err := parser.ParseFile(fileSet, file, src, parserMode) + if err != nil { + log.Fatalf("parsing input", err) + } + + for _, rw := range reWrites { + astfile = rw(astfile) + } + + // Inspect the AST and print all identifiers and literals. 
+ var startDecl token.Pos + var endDecl token.Pos + ast.Inspect(astfile, func(n ast.Node) bool { + var s string + switch x := n.(type) { + case *ast.Ident: + if x.IsExported() { + t := strings.ToLower(x.Name) + for _, pre := range excludePrefixes { + if strings.HasPrefix(t, pre) { + return true + } + } + if excludeNames[t] != true { + //if x.Pos() > startDecl && x.Pos() < endDecl { + exported[x.Name] = initRewrite(x.Name + " -> " + t) + } + } + + case *ast.GenDecl: + if x.Tok == token.CONST && x.Lparen > 0 { + startDecl = x.Lparen + endDecl = x.Rparen + // fmt.Printf("Decl:%s -> %s\n", fileSet.Position(startDecl), fileSet.Position(endDecl)) + } + } + if s != "" { + fmt.Printf("%s:\t%s\n", fileSet.Position(n.Pos()), s) + } + return true + }) + + for _, rw := range exported { + astfile = rw(astfile) + } + + var buf bytes.Buffer + + printer.Fprint(&buf, fileSet, astfile) + + // Remove package documentation and insert information + s := buf.String() + ind := strings.Index(buf.String(), "\npackage cpuid") + s = s[ind:] + s = "// Generated, DO NOT EDIT,\n" + + "// but copy it to your own project and rename the package.\n" + + "// See more at http://github.com/klauspost/cpuid\n" + + s + + outputName := Package + string(os.PathSeparator) + file + + err = ioutil.WriteFile(outputName, []byte(s), 0644) + if err != nil { + log.Fatalf("writing output: %s", err) + } + log.Println("Generated", outputName) + } + + for _, file := range copyFiles { + dst := "" + if strings.HasPrefix(file, "cpuid") { + dst = Package + string(os.PathSeparator) + file + } else { + dst = Package + string(os.PathSeparator) + "cpuid_" + file + } + err := copyFile(file, dst) + if err != nil { + log.Fatalf("copying file: %s", err) + } + log.Println("Copied", dst) + } +} + +// CopyFile copies a file from src to dst. If src and dst files exist, and are +// the same, then return success. Copy the file contents from src to dst. +func copyFile(src, dst string) (err error) { + sfi, err := os.Stat(src) + if err != nil { + return + } + if !sfi.Mode().IsRegular() { + // cannot copy non-regular files (e.g., directories, + // symlinks, devices, etc.) + return fmt.Errorf("CopyFile: non-regular source file %s (%q)", sfi.Name(), sfi.Mode().String()) + } + dfi, err := os.Stat(dst) + if err != nil { + if !os.IsNotExist(err) { + return + } + } else { + if !(dfi.Mode().IsRegular()) { + return fmt.Errorf("CopyFile: non-regular destination file %s (%q)", dfi.Name(), dfi.Mode().String()) + } + if os.SameFile(sfi, dfi) { + return + } + } + err = copyFileContents(src, dst) + return +} + +// copyFileContents copies the contents of the file named src to the file named +// by dst. The file will be created if it does not already exist. If the +// destination file exists, all it's contents will be replaced by the contents +// of the source file. 
+func copyFileContents(src, dst string) (err error) { + in, err := os.Open(src) + if err != nil { + return + } + defer in.Close() + out, err := os.Create(dst) + if err != nil { + return + } + defer func() { + cerr := out.Close() + if err == nil { + err = cerr + } + }() + if _, err = io.Copy(out, in); err != nil { + return + } + err = out.Sync() + return +} + +type rewrite func(*ast.File) *ast.File + +// Mostly copied from gofmt +func initRewrite(rewriteRule string) rewrite { + f := strings.Split(rewriteRule, "->") + if len(f) != 2 { + fmt.Fprintf(os.Stderr, "rewrite rule must be of the form 'pattern -> replacement'\n") + os.Exit(2) + } + pattern := parseExpr(f[0], "pattern") + replace := parseExpr(f[1], "replacement") + return func(p *ast.File) *ast.File { return rewriteFile(pattern, replace, p) } +} + +// parseExpr parses s as an expression. +// It might make sense to expand this to allow statement patterns, +// but there are problems with preserving formatting and also +// with what a wildcard for a statement looks like. +func parseExpr(s, what string) ast.Expr { + x, err := parser.ParseExpr(s) + if err != nil { + fmt.Fprintf(os.Stderr, "parsing %s %s at %s\n", what, s, err) + os.Exit(2) + } + return x +} + +// Keep this function for debugging. +/* +func dump(msg string, val reflect.Value) { + fmt.Printf("%s:\n", msg) + ast.Print(fileSet, val.Interface()) + fmt.Println() +} +*/ + +// rewriteFile applies the rewrite rule 'pattern -> replace' to an entire file. +func rewriteFile(pattern, replace ast.Expr, p *ast.File) *ast.File { + cmap := ast.NewCommentMap(fileSet, p, p.Comments) + m := make(map[string]reflect.Value) + pat := reflect.ValueOf(pattern) + repl := reflect.ValueOf(replace) + + var rewriteVal func(val reflect.Value) reflect.Value + rewriteVal = func(val reflect.Value) reflect.Value { + // don't bother if val is invalid to start with + if !val.IsValid() { + return reflect.Value{} + } + for k := range m { + delete(m, k) + } + val = apply(rewriteVal, val) + if match(m, pat, val) { + val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos())) + } + return val + } + + r := apply(rewriteVal, reflect.ValueOf(p)).Interface().(*ast.File) + r.Comments = cmap.Filter(r).Comments() // recreate comments list + return r +} + +// set is a wrapper for x.Set(y); it protects the caller from panics if x cannot be changed to y. +func set(x, y reflect.Value) { + // don't bother if x cannot be set or y is invalid + if !x.CanSet() || !y.IsValid() { + return + } + defer func() { + if x := recover(); x != nil { + if s, ok := x.(string); ok && + (strings.Contains(s, "type mismatch") || strings.Contains(s, "not assignable")) { + // x cannot be set to y - ignore this rewrite + return + } + panic(x) + } + }() + x.Set(y) +} + +// Values/types for special cases. +var ( + objectPtrNil = reflect.ValueOf((*ast.Object)(nil)) + scopePtrNil = reflect.ValueOf((*ast.Scope)(nil)) + + identType = reflect.TypeOf((*ast.Ident)(nil)) + objectPtrType = reflect.TypeOf((*ast.Object)(nil)) + positionType = reflect.TypeOf(token.NoPos) + callExprType = reflect.TypeOf((*ast.CallExpr)(nil)) + scopePtrType = reflect.TypeOf((*ast.Scope)(nil)) +) + +// apply replaces each AST field x in val with f(x), returning val. +// To avoid extra conversions, f operates on the reflect.Value form. 
+func apply(f func(reflect.Value) reflect.Value, val reflect.Value) reflect.Value { + if !val.IsValid() { + return reflect.Value{} + } + + // *ast.Objects introduce cycles and are likely incorrect after + // rewrite; don't follow them but replace with nil instead + if val.Type() == objectPtrType { + return objectPtrNil + } + + // similarly for scopes: they are likely incorrect after a rewrite; + // replace them with nil + if val.Type() == scopePtrType { + return scopePtrNil + } + + switch v := reflect.Indirect(val); v.Kind() { + case reflect.Slice: + for i := 0; i < v.Len(); i++ { + e := v.Index(i) + set(e, f(e)) + } + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + e := v.Field(i) + set(e, f(e)) + } + case reflect.Interface: + e := v.Elem() + set(v, f(e)) + } + return val +} + +func isWildcard(s string) bool { + rune, size := utf8.DecodeRuneInString(s) + return size == len(s) && unicode.IsLower(rune) +} + +// match returns true if pattern matches val, +// recording wildcard submatches in m. +// If m == nil, match checks whether pattern == val. +func match(m map[string]reflect.Value, pattern, val reflect.Value) bool { + // Wildcard matches any expression. If it appears multiple + // times in the pattern, it must match the same expression + // each time. + if m != nil && pattern.IsValid() && pattern.Type() == identType { + name := pattern.Interface().(*ast.Ident).Name + if isWildcard(name) && val.IsValid() { + // wildcards only match valid (non-nil) expressions. + if _, ok := val.Interface().(ast.Expr); ok && !val.IsNil() { + if old, ok := m[name]; ok { + return match(nil, old, val) + } + m[name] = val + return true + } + } + } + + // Otherwise, pattern and val must match recursively. + if !pattern.IsValid() || !val.IsValid() { + return !pattern.IsValid() && !val.IsValid() + } + if pattern.Type() != val.Type() { + return false + } + + // Special cases. + switch pattern.Type() { + case identType: + // For identifiers, only the names need to match + // (and none of the other *ast.Object information). + // This is a common case, handle it all here instead + // of recursing down any further via reflection. + p := pattern.Interface().(*ast.Ident) + v := val.Interface().(*ast.Ident) + return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name + case objectPtrType, positionType: + // object pointers and token positions always match + return true + case callExprType: + // For calls, the Ellipsis fields (token.Position) must + // match since that is how f(x) and f(x...) are different. + // Check them here but fall through for the remaining fields. + p := pattern.Interface().(*ast.CallExpr) + v := val.Interface().(*ast.CallExpr) + if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() { + return false + } + } + + p := reflect.Indirect(pattern) + v := reflect.Indirect(val) + if !p.IsValid() || !v.IsValid() { + return !p.IsValid() && !v.IsValid() + } + + switch p.Kind() { + case reflect.Slice: + if p.Len() != v.Len() { + return false + } + for i := 0; i < p.Len(); i++ { + if !match(m, p.Index(i), v.Index(i)) { + return false + } + } + return true + + case reflect.Struct: + for i := 0; i < p.NumField(); i++ { + if !match(m, p.Field(i), v.Field(i)) { + return false + } + } + return true + + case reflect.Interface: + return match(m, p.Elem(), v.Elem()) + } + + // Handle token integers, etc. + return p.Interface() == v.Interface() +} + +// subst returns a copy of pattern with values from m substituted in place +// of wildcards and pos used as the position of tokens from the pattern. 
+// if m == nil, subst returns a copy of pattern and doesn't change the line +// number information. +func subst(m map[string]reflect.Value, pattern reflect.Value, pos reflect.Value) reflect.Value { + if !pattern.IsValid() { + return reflect.Value{} + } + + // Wildcard gets replaced with map value. + if m != nil && pattern.Type() == identType { + name := pattern.Interface().(*ast.Ident).Name + if isWildcard(name) { + if old, ok := m[name]; ok { + return subst(nil, old, reflect.Value{}) + } + } + } + + if pos.IsValid() && pattern.Type() == positionType { + // use new position only if old position was valid in the first place + if old := pattern.Interface().(token.Pos); !old.IsValid() { + return pattern + } + return pos + } + + // Otherwise copy. + switch p := pattern; p.Kind() { + case reflect.Slice: + v := reflect.MakeSlice(p.Type(), p.Len(), p.Len()) + for i := 0; i < p.Len(); i++ { + v.Index(i).Set(subst(m, p.Index(i), pos)) + } + return v + + case reflect.Struct: + v := reflect.New(p.Type()).Elem() + for i := 0; i < p.NumField(); i++ { + v.Field(i).Set(subst(m, p.Field(i), pos)) + } + return v + + case reflect.Ptr: + v := reflect.New(p.Type()).Elem() + if elem := p.Elem(); elem.IsValid() { + v.Set(subst(m, elem, pos).Addr()) + } + return v + + case reflect.Interface: + v := reflect.New(p.Type()).Elem() + if elem := p.Elem(); elem.IsValid() { + v.Set(subst(m, elem, pos)) + } + return v + } + + return pattern +} diff --git a/vendor/vendor.json b/vendor/vendor.json index c2db12d5350..a9dc0d66336 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -6,22 +6,22 @@ "checksumSHA1": "AzjRkOQtVBTwIw4RJLTygFhJs3s=", "origin": "github.com/elastic/beats/vendor/github.com/Microsoft/go-winio", "path": "github.com/Microsoft/go-winio", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { - "checksumSHA1": "bFZjImadCCzrjKSGjlg/HZpMqT4=", + "checksumSHA1": "8YvZ6Yar1o+1GUCNmyBf6Ma1n60=", "origin": "github.com/elastic/beats/vendor/github.com/Shopify/sarama", "path": "github.com/Shopify/sarama", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "DYv6Q1+VfnUVxMwvk5IshAClLvw=", "origin": "github.com/elastic/beats/vendor/github.com/Sirupsen/logrus", "path": "github.com/Sirupsen/logrus", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "qtjd74+bErubh+qyv3s+lWmn9wc=", @@ -33,197 +33,197 @@ "checksumSHA1": "dvabztWVQX8f6oMLRyv4dLH+TGY=", "origin": "github.com/elastic/beats/vendor/github.com/davecgh/go-spew/spew", "path": "github.com/davecgh/go-spew/spew", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "Gj+xR1VgFKKmFXYOJMnAczC3Znk=", "origin": "github.com/elastic/beats/vendor/github.com/docker/distribution/digestset", "path": "github.com/docker/distribution/digestset", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - 
"revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "2Fe4D6PGaVE2he4fUeenLmhC1lE=", "origin": "github.com/elastic/beats/vendor/github.com/docker/distribution/reference", "path": "github.com/docker/distribution/reference", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "UL2NF1EGiSsQoEfvycnuZIcUbZY=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api", "path": "github.com/docker/docker/api", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "KMFpbV3mlrbc41d2DYnq05QYpSc=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types", "path": "github.com/docker/docker/api/types", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "jVJDbe0IcyjoKc2xbohwzQr+FF0=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/blkiodev", "path": "github.com/docker/docker/api/types/blkiodev", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "AeSC0BOu1uapkGqfSXtfVSpwJzs=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/container", "path": "github.com/docker/docker/api/types/container", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "4XuWn5+wgYwUsw604jvYMklq4Hc=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/events", "path": "github.com/docker/docker/api/types/events", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "J2OKngfI3vgswudr9PZVUFcRRu0=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/filters", "path": "github.com/docker/docker/api/types/filters", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "yeB781yxPhnN6OXQ9/qSsyih3ek=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/image", "path": "github.com/docker/docker/api/types/image", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "UK+VdM648oWzyqE4OqttgmPqjoA=", "origin": 
"github.com/elastic/beats/vendor/github.com/docker/docker/api/types/mount", "path": "github.com/docker/docker/api/types/mount", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "Gskp+nvbVe8Gk1xPLHylZvNmqTg=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/network", "path": "github.com/docker/docker/api/types/network", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "r2vWq7Uc3ExKzMqYgH0b4AKjLKY=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/registry", "path": "github.com/docker/docker/api/types/registry", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "VTxWyFud/RedrpllGdQonVtGM/A=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/strslice", "path": "github.com/docker/docker/api/types/strslice", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ZaizCpJ3eBcfR9ywpLaJd4AhM9k=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/swarm", "path": "github.com/docker/docker/api/types/swarm", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "B7ZwKzrv3t3Vlox6/bYMHhMjsM8=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/time", "path": "github.com/docker/docker/api/types/time", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "uDPQ3nHsrvGQc9tg/J9OSC4N5dQ=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/versions", "path": "github.com/docker/docker/api/types/versions", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "IBJy2zPEnYmcFJ3lM1eiRWnCxTA=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/api/types/volume", "path": "github.com/docker/docker/api/types/volume", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "c6OyeEvpQDvVLhrJSxgjEZv1tF8=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/client", "path": "github.com/docker/docker/client", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": 
"2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "jmo/t2zXAxirEPoFucNPXA/1SEc=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/pkg/ioutils", "path": "github.com/docker/docker/pkg/ioutils", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ndnAFCfsGC3upNQ6jAEwzxcurww=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/pkg/longpath", "path": "github.com/docker/docker/pkg/longpath", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "kr46EAa+FsUcWxOq6edyX6BUzE4=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/pkg/system", "path": "github.com/docker/docker/pkg/system", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "8I0Ez+aUYGpsDEVZ8wN/Ztf6Zqs=", "origin": "github.com/elastic/beats/vendor/github.com/docker/docker/pkg/tlsconfig", "path": "github.com/docker/docker/pkg/tlsconfig", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "JbiWTzH699Sqz25XmDlsARpMN9w=", "origin": "github.com/elastic/beats/vendor/github.com/docker/go-connections/nat", "path": "github.com/docker/go-connections/nat", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "jUfDG3VQsA2UZHvvIXncgiddpYA=", "origin": "github.com/elastic/beats/vendor/github.com/docker/go-connections/sockets", "path": "github.com/docker/go-connections/sockets", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "tuSzlS1UQ03+5Fvtqr5hI5sLLhA=", "origin": "github.com/elastic/beats/vendor/github.com/docker/go-connections/tlsconfig", "path": "github.com/docker/go-connections/tlsconfig", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ambe8F4AofPxChCJssXXwWphQQ8=", "origin": "github.com/elastic/beats/vendor/github.com/docker/go-units", "path": "github.com/docker/go-units", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "sNAU9ojYVUhO6dVXey6T3JhRQpw=", "origin": "github.com/elastic/beats/vendor/github.com/docker/libtrust", "path": "github.com/docker/libtrust", - 
"revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "tJd2T/eyW6ejAev7WzGxTeUVOPQ=", @@ -235,22 +235,22 @@ "checksumSHA1": "y2Kh4iPlgCPXSGTCcFpzePYdzzg=", "origin": "github.com/elastic/beats/vendor/github.com/eapache/go-resiliency/breaker", "path": "github.com/eapache/go-resiliency/breaker", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "WHl96RVZlOOdF4Lb1OOadMpw8ls=", "origin": "github.com/elastic/beats/vendor/github.com/eapache/go-xerial-snappy", "path": "github.com/eapache/go-xerial-snappy", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "oCCs6kDanizatplM5e/hX76busE=", "origin": "github.com/elastic/beats/vendor/github.com/eapache/queue", "path": "github.com/eapache/queue", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "iS7awdGQOMgYrHf2XvIiT5w6weA=", @@ -357,722 +357,714 @@ "versionExact": "v0.4.0" }, { - "checksumSHA1": "6Vys4++kajSzgUGaTkhN+daIIt4=", + "checksumSHA1": "uAg7076qVjhDPOYTx2x9MiBg9hA=", "path": "github.com/elastic/beats/dev-tools/mage", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "y34pfVnTprxa4BvLKfiBbqTJaGA=", "path": "github.com/elastic/beats/libbeat/api", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "I4vTnXyhD99J/OfmZqHRroy1HRE=", "path": "github.com/elastic/beats/libbeat/asset", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "OdUUHDW+/OEehe6ewr8UEteIVo4=", "path": "github.com/elastic/beats/libbeat/autodiscover", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "pW/XBpb2BIceplyuoqvwTtowH7c=", "path": "github.com/elastic/beats/libbeat/autodiscover/builder", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "IifZH9hzzPymGV2XQfQ/tFR4uSE=", "path": "github.com/elastic/beats/libbeat/autodiscover/meta", - 
"revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "ZcmRnVuYkeSqQZbV5gi7z9PR3I8=", "path": "github.com/elastic/beats/libbeat/autodiscover/providers/docker", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "1BEmoIUr+/GODX/YKv307PYB4aM=", "path": "github.com/elastic/beats/libbeat/autodiscover/providers/jolokia", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "8s5j0/VbYvFNV48LdJTtkBEjLbQ=", "path": "github.com/elastic/beats/libbeat/autodiscover/providers/kubernetes", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "/bV/lT2HY/CPP75dNeuvKZWeFqg=", "path": "github.com/elastic/beats/libbeat/autodiscover/template", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "Oj2NCArxKbamqSMcTt/0Jq9HSx4=", "path": "github.com/elastic/beats/libbeat/beat", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "UkvFltjIEYVRUBGZLecI3sfvxpY=", "path": "github.com/elastic/beats/libbeat/cfgfile", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "NP63NO+chtAOIPaeiIlSXmzWits=", "path": "github.com/elastic/beats/libbeat/cloudid", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "I6I4Pi7XmpedDULWEbh9xR9JF2s=", "path": "github.com/elastic/beats/libbeat/cmd", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "FC7ToRnGzD/3VIxiTF7DJc98rX8=", "path": "github.com/elastic/beats/libbeat/cmd/export", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": 
"master" }, { "checksumSHA1": "D2e+QrFgFGDDgR3S+QzL/nQ/2+4=", "path": "github.com/elastic/beats/libbeat/cmd/instance", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "TGW7SUpyY5XCLRjLW2n62yDKqBk=", "path": "github.com/elastic/beats/libbeat/cmd/test", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "seiszwA3/jFiehFcmcyC6BtMEMU=", + "checksumSHA1": "X35cXnNQEhaxJoeKXYKl2Bf+C8w=", "path": "github.com/elastic/beats/libbeat/common", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "lFYRu/M9CL6/povZOeBYui9/laI=", "path": "github.com/elastic/beats/libbeat/common/atomic", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "D1rvRGmNzRcbBBpRgbBZGc7lkOU=", "path": "github.com/elastic/beats/libbeat/common/bus", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "zRpP/UzB/wFQNLceGdVgC4QqviM=", "path": "github.com/elastic/beats/libbeat/common/cfgtype", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "JW9FjfvZn4rawSFC4UCCK3zjxQI=", "path": "github.com/elastic/beats/libbeat/common/cfgwarn", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "Xe8ezkUIrqnG2wRIAgrtIei2E5E=", "path": "github.com/elastic/beats/libbeat/common/cli", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "m/qsRUwCpXxJVQd3L8s8gwlFsW4=", "path": "github.com/elastic/beats/libbeat/common/docker", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "Gxdi0z5FpIG68TQBD+zho4pEBlU=", "path": "github.com/elastic/beats/libbeat/common/dtfmt", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + 
"revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "L0LktHMqSLS03T63HBB/F04y3H4=", "path": "github.com/elastic/beats/libbeat/common/file", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "pt4OCbyb9z7fgJEidmOx6mua0h8=", "path": "github.com/elastic/beats/libbeat/common/fmtstr", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "K8hsg9OHpVHm7A43uq+TdX5DRc4=", "path": "github.com/elastic/beats/libbeat/common/jsontransform", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "1dLYXgcFynpncg5WCArJl6w2aqg=", "path": "github.com/elastic/beats/libbeat/common/kubernetes", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "tX/nsD/KEE0KCWECoPyx5CHNPdc=", "path": "github.com/elastic/beats/libbeat/common/match", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "uMo9yaQAFfFG9iOsmdhQokffvpc=", "path": "github.com/elastic/beats/libbeat/common/safemapstr", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "719FzIxi7bvpmh3Z1Ugn1VzY7Ro=", "path": "github.com/elastic/beats/libbeat/common/schema", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "uffmniMUvoDPoH/udr7ogkh062E=", "path": "github.com/elastic/beats/libbeat/common/schema/mapstriface", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "yU4kvU65qUWsFs4aq+KjplWCX94=", "path": "github.com/elastic/beats/libbeat/common/seccomp", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "bfwgWh6tuDRh6ukgJyS/1qF/cyk=", "path": 
"github.com/elastic/beats/libbeat/common/streambuf", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "IQGJeUodp0fl4Zy8W1rBzWtWSWA=", "path": "github.com/elastic/beats/libbeat/common/terminal", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "6SNb83rQbHHamMIny6qLPBJ4Vn4=", "path": "github.com/elastic/beats/libbeat/common/transport/tlscommon", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "oHeO7hUYwCb9+DWYFaKzw89YanA=", "path": "github.com/elastic/beats/libbeat/conditions", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "v1uglELtgBUQ35dH1IOH8ZSeNaw=", "path": "github.com/elastic/beats/libbeat/dashboards", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "i/7M5crL7LrHAkyBLBzaj3LLV08=", "path": "github.com/elastic/beats/libbeat/feature", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "NzJlDvYwC+pBb9mr5jV/UG8LZbU=", + "checksumSHA1": "J6JozoRRF+ZB7RvepDmYbSrZ8U0=", "path": "github.com/elastic/beats/libbeat/generator/fields", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "vCc6XCOhlX500QsMb0h2dMAX2wE=", "path": "github.com/elastic/beats/libbeat/keystore", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "QFon+IhYQiBDXzV9TBPN5Un6FsE=", "path": "github.com/elastic/beats/libbeat/kibana", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", - "version": "master", - "versionExact": "master" - }, - { - "checksumSHA1": "z8VztKgyYSA7eyVuhNocdpav6zA=", - "path": "github.com/elastic/beats/libbeat/kibana/", - "revision": "78f520f2c417ebc51aac558662cb68b3500d03bf", - "revisionTime": "2018-04-30T12:46:23Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { 
"checksumSHA1": "EJeVCFtI/9yrAYD5VuBhtJ1pl4c=", "path": "github.com/elastic/beats/libbeat/logp", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "bM+Zvy63NmXBfPQZYZmWBCo/UIk=", "path": "github.com/elastic/beats/libbeat/logp/configure", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "b93RtvT+BZW3gpAMm68YZjyj664=", + "checksumSHA1": "/qNmzEvnTPNTKsZG8NgDzJZ+6f8=", "path": "github.com/elastic/beats/libbeat/metric/system/cpu", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "NK3LQ/fpi8LBKDlcUokPI0n5ecE=", "path": "github.com/elastic/beats/libbeat/metric/system/host", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "AeCEL+vyIBE0WH16IaR4ygy+6IA=", "path": "github.com/elastic/beats/libbeat/metric/system/memory", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "rKh4Kc2nLHGP/l9XUQuYwLFZ2sk=", "path": "github.com/elastic/beats/libbeat/metric/system/process", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "HwjxgzHx7D24UC0mvx2i21jSDJg=", "path": "github.com/elastic/beats/libbeat/monitoring", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "7DdIn1PxWmHIUpwBbgCl3avLHj4=", "path": "github.com/elastic/beats/libbeat/monitoring/adapter", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "L2JLxzFvSCOjJ268kNfnPS1BAeI=", "path": "github.com/elastic/beats/libbeat/monitoring/report", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "emNnwkYmVpt6yTDytvqnFAacPUQ=", "path": "github.com/elastic/beats/libbeat/monitoring/report/elasticsearch", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - 
"revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "NIdJCMcoHBoqaAMqaZ4Ul/SHU3E=", "path": "github.com/elastic/beats/libbeat/monitoring/report/log", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "B8UscDQeoAmivVRRzEwJqv1MTmk=", "path": "github.com/elastic/beats/libbeat/outputs", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "a2kloD1x12fBRXViCIXm+ebyUYI=", "path": "github.com/elastic/beats/libbeat/outputs/codec", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "dO9pXcIpo6PH5yxMbJUSb/BYbkc=", "path": "github.com/elastic/beats/libbeat/outputs/codec/format", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "q6U+U/YTIWKyBeAY/hSzgs3lOzA=", "path": "github.com/elastic/beats/libbeat/outputs/codec/json", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "QezZ6ewDQx90l2dmhEuZN0feBLc=", "path": "github.com/elastic/beats/libbeat/outputs/console", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "+DNaifHNsLf9ua6SkVtGJepKWto=", "path": "github.com/elastic/beats/libbeat/outputs/elasticsearch", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "HmvKevxrIn6OJ8y1w76qqk3oVUQ=", "path": "github.com/elastic/beats/libbeat/outputs/fileout", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "eeG0BAf2lSQJdcdcN8mTBlcaTO0=", + "checksumSHA1": "GdrOshv79fuCzjl7m5JzZdNxAaw=", "path": "github.com/elastic/beats/libbeat/outputs/kafka", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { 
"checksumSHA1": "gK4mKPDnPET3jfzhyS22TngM0aE=", "path": "github.com/elastic/beats/libbeat/outputs/logstash", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "NJv+STaa3bEOeVra8WkEgOtzQic=", "path": "github.com/elastic/beats/libbeat/outputs/outil", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "PYJYIYhYjCIXMN6ngjHVy5YcOd0=", "path": "github.com/elastic/beats/libbeat/outputs/redis", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "ft3nHLWZjYbMATPWMJepIFv7kdk=", "path": "github.com/elastic/beats/libbeat/outputs/transport", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "72qBNCQrGrvLZFMGD2AqxxKQMsw=", "path": "github.com/elastic/beats/libbeat/outputs/transport/transptest", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "hb8M4qSLzgDXpQmdQfEyB7aChhI=", "path": "github.com/elastic/beats/libbeat/paths", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "MnBepwK6hFumvOtA+c3JGACMZdM=", + "checksumSHA1": "Ex591VUskR/iwyxQGZPagLgy6sc=", "path": "github.com/elastic/beats/libbeat/plugin", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "lhUKTNKAJqUl7dgyZN8En9B2Vnw=", "path": "github.com/elastic/beats/libbeat/processors", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "LLJw1fwYwLkjHTLzj6RuxeBCdbE=", + "checksumSHA1": "IYLNfiWN8aRJYFfwtZ8Ou/Teo4k=", "path": "github.com/elastic/beats/libbeat/processors/actions", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "0w12Qe9n8IgAWf98HN2xJk49fW8=", "path": "github.com/elastic/beats/libbeat/processors/add_cloud_metadata", - "revision": 
"a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "O0CSXnFDU69TzhCkboDUtQQWxkg=", "path": "github.com/elastic/beats/libbeat/processors/add_docker_metadata", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "jSqYsKJ124qH3UKEid0U44RvrV8=", "path": "github.com/elastic/beats/libbeat/processors/add_host_metadata", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "jQxShMKNtRduMiH2EvvagaZwl3w=", "path": "github.com/elastic/beats/libbeat/processors/add_kubernetes_metadata", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "Gq+gEFL9CgrhQZOMYj0tfvyNIZs=", "path": "github.com/elastic/beats/libbeat/processors/add_locale", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "vKkAm3Mz4r2bcYahkyK/DkxT6sM=", "path": "github.com/elastic/beats/libbeat/processors/dissect", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "7kYMLJJXmsDd9GyXgq9Z9eJdqrk=", "path": "github.com/elastic/beats/libbeat/publisher", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "X8cgQCH4iuEYK+xHRIxSL+4XFvw=", "path": "github.com/elastic/beats/libbeat/publisher/includes", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "30Kb1SZHJLwEkrYHqYSwZd8mX6c=", "path": "github.com/elastic/beats/libbeat/publisher/pipeline", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "UQK7Jv4EkpQeEzMTDWZ1cHBTTZk=", "path": "github.com/elastic/beats/libbeat/publisher/queue", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", 
"version": "master", "versionExact": "master" }, { "checksumSHA1": "SCBQW+RDpu2PwBIDHhsHpwjjdvg=", "path": "github.com/elastic/beats/libbeat/publisher/queue/memqueue", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "nu8uxHHUDHromK4VvJRXRMJXclc=", "path": "github.com/elastic/beats/libbeat/publisher/queue/spool", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "fZ5S9LEZUcy5JlhwpHBn4MGNjOg=", "path": "github.com/elastic/beats/libbeat/publisher/testing", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "6fUK+ATQJiNqlA8QnB6iM6xn/ME=", - "path": "github.com/elastic/beats/libbeat/service", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "checksumSHA1": "BlhhYVlDk+PeZlLj2MoiULvYSGU=", + "path": "github.com/elastic/beats/libbeat/scripts/cmd/global_fields", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "unPYsSkPZR/PukshE+IZlyvCrE4=", - "path": "github.com/elastic/beats/libbeat/setup/kibana", - "revision": "c4593443ab5943ac5dd3465d88236b0cf417fab6", - "revisionTime": "2018-05-22T10:47:09Z", + "checksumSHA1": "6fUK+ATQJiNqlA8QnB6iM6xn/ME=", + "path": "github.com/elastic/beats/libbeat/service", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { - "checksumSHA1": "+COF5R9abcR3c7NtVffp78XDDS4=", + "checksumSHA1": "rxy3HCgjYpiNO61dYSwk9J47Ag4=", "path": "github.com/elastic/beats/libbeat/template", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "cUaub8fyxT3TmLTy5mUA1qZ5lJc=", "path": "github.com/elastic/beats/libbeat/testing", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, { "checksumSHA1": "Z4Su3+qWumYRW/okmo/VAi762Ys=", "path": "github.com/elastic/beats/libbeat/version", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z", + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z", "version": "master", "versionExact": "master" }, @@ -1080,29 +1072,29 @@ "checksumSHA1": "3jizmlZPCyo6FAZY8Trk9jA8NH4=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-lumber/client/v2", "path": "github.com/elastic/go-lumber/client/v2", - "revision": 
"a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "m6HLKpDAZlkTTQMqabf3aT6TQ/s=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-lumber/protocol/v2", "path": "github.com/elastic/go-lumber/protocol/v2", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "TatpgVf9fhQp1GtNwSyNw5cgVKM=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-seccomp-bpf", "path": "github.com/elastic/go-seccomp-bpf", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "qTs7QT+GC2Dr4aFoLFHCkAOoVeU=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-seccomp-bpf/arch", "path": "github.com/elastic/go-seccomp-bpf/arch", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "BY+a5iQICad7U2AZqwej2SIW9J8=", @@ -1225,46 +1217,46 @@ "revisionTime": "2018-04-03T17:15:40Z" }, { - "checksumSHA1": "d0qibYdQy5G1YqI5H+xNC0QJ66g=", + "checksumSHA1": "MK8/w0Idj7kRBUiBabARPdm9hOo=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-ucfg", "path": "github.com/elastic/go-ucfg", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "X+R/CD8SokJrmlxFTx2nSevRDhQ=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-ucfg/cfgutil", "path": "github.com/elastic/go-ucfg/cfgutil", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { - "checksumSHA1": "dShGF53hLUufO70RAjju+RT0fHY=", + "checksumSHA1": "zC8mCPW/pPPNcuHQOc/B/Ej1W1U=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-ucfg/flag", "path": "github.com/elastic/go-ucfg/flag", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "esXpiQlEvTOUwsE0nNesso8albo=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-ucfg/internal/parse", "path": "github.com/elastic/go-ucfg/internal/parse", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "5mXUhhlPdvcAFKiQENInTJWrtQM=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-ucfg/json", "path": "github.com/elastic/go-ucfg/json", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": 
"9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "Bg6vistPQLftv2fEYB7GWwSExv8=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/go-ucfg/yaml", "path": "github.com/elastic/go-ucfg/yaml", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "yu/X+qHftvfQlAnjPdYLwrDn2nI=", @@ -1276,218 +1268,218 @@ "checksumSHA1": "RPOLNUpw00QUUaA/U4YbPVf6WlA=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/gosigar", "path": "github.com/elastic/gosigar", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "TX9y4oPL5YmT4Gb/OU4GIPTdQB4=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/gosigar/cgroup", "path": "github.com/elastic/gosigar/cgroup", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "hPqGM3DENaGfipEODoyZ4mKogTQ=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/gosigar/sys", "path": "github.com/elastic/gosigar/sys", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "mLq5lOyD0ZU39ysXuf1ETOLJ+f0=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/gosigar/sys/linux", "path": "github.com/elastic/gosigar/sys/linux", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "qDsgp2kAeI9nhj565HUScaUyjU4=", "origin": "github.com/elastic/beats/vendor/github.com/elastic/gosigar/sys/windows", "path": "github.com/elastic/gosigar/sys/windows", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "hTxFrbA619JCHysWjXHa9U6bfto=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s", "path": "github.com/ericchiang/k8s", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "y8fNiBLSoGojnUsGDsdLlsJYyqQ=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/apiextensions/v1beta1", "path": "github.com/ericchiang/k8s/apis/apiextensions/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "JxQ/zEWQSrncYNKifCuMctq+Tsw=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/apps/v1beta1", "path": 
"github.com/ericchiang/k8s/apis/apps/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "bjklGt/pc6kWOZewAw87Hchw5oY=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/authentication/v1", "path": "github.com/ericchiang/k8s/apis/authentication/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "LExhnM9Vn0LQoLQWszQ7aIxDxb4=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/authentication/v1beta1", "path": "github.com/ericchiang/k8s/apis/authentication/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "GM+PzOiBoq3cxx4h5RKVUb3UH60=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/authorization/v1", "path": "github.com/ericchiang/k8s/apis/authorization/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "zfr5oUVjbWRfvXi2LJiGMfFeDQY=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/authorization/v1beta1", "path": "github.com/ericchiang/k8s/apis/authorization/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "izkXNDp5a5WP45jU0hSfTrwyfvM=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/autoscaling/v1", "path": "github.com/ericchiang/k8s/apis/autoscaling/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "FryZuAxWn4Ig8zc913w9BdfYzvs=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/batch/v1", "path": "github.com/ericchiang/k8s/apis/batch/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ylo7Z8wyJD+tmICB7wsOVIBpO+U=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/batch/v2alpha1", "path": "github.com/ericchiang/k8s/apis/batch/v2alpha1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "+d8+mSdkdcPWQIpczXDZZW0lrjg=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/certificates/v1beta1", "path": "github.com/ericchiang/k8s/apis/certificates/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": 
"2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "S7AvxmCe/+WoFP/v9lZr0Mv66qg=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/core/v1", "path": "github.com/ericchiang/k8s/apis/core/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "cWPoP6XZN7WMnEVMPcgPgg3Aw9Q=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/extensions/v1beta1", "path": "github.com/ericchiang/k8s/apis/extensions/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "vaNrBPcGWeDd1rXl8+uN08uxWhE=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/imagepolicy/v1alpha1", "path": "github.com/ericchiang/k8s/apis/imagepolicy/v1alpha1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "UNTTH+Ppu4vImnF+bPkG3/NR3gg=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/meta/v1", "path": "github.com/ericchiang/k8s/apis/meta/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "Mmyg9Wh+FCVR6fV8MGEKRxvqZ2k=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/policy/v1beta1", "path": "github.com/ericchiang/k8s/apis/policy/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "bvwYS/wrBkyAfvCjzMbi/vKamrQ=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/rbac/v1alpha1", "path": "github.com/ericchiang/k8s/apis/rbac/v1alpha1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "m1Tde18NwewnvJoOYL3uykNcBuM=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/rbac/v1beta1", "path": "github.com/ericchiang/k8s/apis/rbac/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "JirJkoeIkWJRNrbprsQvqwisxds=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/resource", "path": "github.com/ericchiang/k8s/apis/resource", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "rQZ69PjEClQQ+PGEHRKzkGVVQyw=", "origin": 
"github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/settings/v1alpha1", "path": "github.com/ericchiang/k8s/apis/settings/v1alpha1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "pp0AetmPoKy7Rz0zNhBwUpExkbc=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/storage/v1", "path": "github.com/ericchiang/k8s/apis/storage/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "WeACcIrS4EkeBm8TTftwuVniaWk=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/apis/storage/v1beta1", "path": "github.com/ericchiang/k8s/apis/storage/v1beta1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "Su6wSR8V8HL2QZsF8icJ0R9AFq8=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/runtime", "path": "github.com/ericchiang/k8s/runtime", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "8ETrRvIaXPfD21N7fa8kdbumL00=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/runtime/schema", "path": "github.com/ericchiang/k8s/runtime/schema", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "cMk3HE8/81ExHuEs0F5sZCclOFs=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/util/intstr", "path": "github.com/ericchiang/k8s/util/intstr", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "fobEKiMk5D7IGvCSwh4HdG1o98c=", "origin": "github.com/elastic/beats/vendor/github.com/ericchiang/k8s/watch/versioned", "path": "github.com/ericchiang/k8s/watch/versioned", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "AANTVr9CVVyzsgviODY6Wi2thuM=", @@ -1499,22 +1491,22 @@ "checksumSHA1": "2UmMbNHc8FBr98mJFN1k8ISOIHk=", "origin": "github.com/elastic/beats/vendor/github.com/garyburd/redigo/internal", "path": "github.com/garyburd/redigo/internal", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "507OiSqTxfGCje7xDT5eq9CCaNQ=", "origin": "github.com/elastic/beats/vendor/github.com/garyburd/redigo/redis", "path": "github.com/garyburd/redigo/redis", - "revision": 
"a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ImX1uv6O09ggFeBPUJJ2nu7MPSA=", "origin": "github.com/elastic/beats/vendor/github.com/ghodss/yaml", "path": "github.com/ghodss/yaml", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "gxV/cPPLkByTdY8y172t7v4qcZA=", @@ -1544,15 +1536,15 @@ "checksumSHA1": "kBeNcaKk56FguvPSUCEaH6AxpRc=", "origin": "github.com/elastic/beats/vendor/github.com/golang/protobuf/proto", "path": "github.com/golang/protobuf/proto", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "p/8vSviYF91gFflhrt5vkyksroo=", "origin": "github.com/elastic/beats/vendor/github.com/golang/snappy", "path": "github.com/golang/snappy", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "d9PxF1XQGLMJZRct2R8qVM/eYlE=", @@ -1576,8 +1568,8 @@ "checksumSHA1": "l9wW52CYGbmO/NGwYZ/Op2QTmaA=", "origin": "github.com/elastic/beats/vendor/github.com/joeshaw/multierror", "path": "github.com/joeshaw/multierror", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "pa+ZwMzIv+u+BlL8Q2xgL9cQtJg=", @@ -1586,32 +1578,32 @@ "revisionTime": "2015-03-20T12:54:33Z" }, { - "checksumSHA1": "KKxbAKrKrfd33YPpkNsDmTN3S+M=", + "checksumSHA1": "+CqJGh7NIDMnHgScq9sl9tPrnVM=", "origin": "github.com/elastic/beats/vendor/github.com/klauspost/compress/flate", "path": "github.com/klauspost/compress/flate", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "+azPXaZpPF14YHRghNAer13ThQU=", "origin": "github.com/elastic/beats/vendor/github.com/klauspost/compress/zlib", "path": "github.com/klauspost/compress/zlib", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { - "checksumSHA1": "R6zKqn31GjJH1G8W/api7fAW0RU=", + "checksumSHA1": "iKPMvbAueGfdyHcWCgzwKzm8WVo=", "origin": "github.com/elastic/beats/vendor/github.com/klauspost/cpuid", "path": "github.com/klauspost/cpuid", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "BM6ZlNJmtKy3GBoWwg2X55gnZ4A=", "origin": "github.com/elastic/beats/vendor/github.com/klauspost/crc32", "path": "github.com/klauspost/crc32", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": 
"2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "k3e1TD8wrhxfUUG3pQBb10ppNGA=", @@ -1683,29 +1675,29 @@ "checksumSHA1": "sWdAYPKyaT4SW8hNQNpRS0sU4lU=", "origin": "github.com/elastic/beats/vendor/github.com/mitchellh/hashstructure", "path": "github.com/mitchellh/hashstructure", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "2AyUkWjutec6p+470tgio8mYOxI=", "origin": "github.com/elastic/beats/vendor/github.com/opencontainers/go-digest", "path": "github.com/opencontainers/go-digest", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "eOMCORUm8KxiGSy0hBuQsMsxauo=", "origin": "github.com/elastic/beats/vendor/github.com/opencontainers/image-spec/specs-go", "path": "github.com/opencontainers/image-spec/specs-go", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "9YujSsJjiLGkQMzwWycsjqR340k=", "origin": "github.com/elastic/beats/vendor/github.com/opencontainers/image-spec/specs-go/v1", "path": "github.com/opencontainers/image-spec/specs-go/v1", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "JVGDxPn66bpe6xEiexs1r+y6jF0=", @@ -1717,22 +1709,22 @@ "checksumSHA1": "WmrPO1ovmQ7t7hs9yZGbr2SAoM4=", "origin": "github.com/elastic/beats/vendor/github.com/pierrec/lz4", "path": "github.com/pierrec/lz4", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "IT4sX58d+e8osXHV5U6YCSdB/uE=", "origin": "github.com/elastic/beats/vendor/github.com/pierrec/xxHash/xxHash32", "path": "github.com/pierrec/xxHash/xxHash32", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "PdQm3s8DoVJ17Vk8n7o5iPa7PK0=", "origin": "github.com/elastic/beats/vendor/github.com/pkg/errors", "path": "github.com/pkg/errors", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "LuFv4/jlrmFNnDb/5SCSEPAM9vU=", @@ -1770,8 +1762,8 @@ "checksumSHA1": "KAzbLjI9MzW2tjfcAsK75lVRp6I=", "origin": "github.com/elastic/beats/vendor/github.com/rcrowley/go-metrics", "path": "github.com/rcrowley/go-metrics", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { 
"checksumSHA1": "6JP37UqrI0H80Gpk0Y2P+KXgn5M=", @@ -1819,8 +1811,8 @@ "checksumSHA1": "zmC8/3V4ls53DJlNTKDZwPSC/dA=", "origin": "github.com/elastic/beats/vendor/github.com/satori/go.uuid", "path": "github.com/satori/go.uuid", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "v7C+aJ1D/z3MEeCte6bxvpoGjM4=", @@ -1963,29 +1955,29 @@ "checksumSHA1": "dr5+PfIRzXeN+l1VG+s0lea9qz8=", "origin": "github.com/elastic/beats/vendor/golang.org/x/net/context", "path": "golang.org/x/net/context", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "WHc3uByvGaMcnSoI21fhzYgbOgg=", "origin": "github.com/elastic/beats/vendor/golang.org/x/net/context/ctxhttp", "path": "golang.org/x/net/context/ctxhttp", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "TWcqN2+KUWtdqnu18rruwn14UEQ=", "origin": "github.com/elastic/beats/vendor/golang.org/x/net/http2", "path": "golang.org/x/net/http2", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ezWhc7n/FtqkLDQKeU2JbW+80tE=", "origin": "github.com/elastic/beats/vendor/golang.org/x/net/http2/hpack", "path": "golang.org/x/net/http2/hpack", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "RcrB7tgYS/GMW4QrwVdMOTNqIU8=", @@ -1997,8 +1989,8 @@ "checksumSHA1": "3xyuaSNmClqG4YWC7g0isQIbUTc=", "origin": "github.com/elastic/beats/vendor/golang.org/x/net/lex/httplex", "path": "golang.org/x/net/lex/httplex", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "whCSspa9pYarl527EuhPz91cbUE=", @@ -2010,8 +2002,8 @@ "checksumSHA1": "QEm/dePZ0lOnyOs+m22KjXfJ/IU=", "origin": "github.com/elastic/beats/vendor/golang.org/x/net/proxy", "path": "golang.org/x/net/proxy", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "S0DP7Pn7sZUmXc55IzZnNvERu6s=", @@ -2023,15 +2015,15 @@ "checksumSHA1": "CNHEeGnucEUlTHJrLS2kHtfNbws=", "origin": "github.com/elastic/beats/vendor/golang.org/x/sys/unix", "path": "golang.org/x/sys/unix", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "eQq+ZoTWPjyizS9XalhZwfGjQao=", "origin": "github.com/elastic/beats/vendor/golang.org/x/sys/windows", "path": "golang.org/x/sys/windows", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": 
"2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "ZdFZFaXmCgEEaEhVPkyXrnhKhsg=", @@ -2043,15 +2035,15 @@ "checksumSHA1": "VNlkHemg81Ba7ElHfKKUU1h+U1U=", "origin": "github.com/elastic/beats/vendor/golang.org/x/sys/windows/svc", "path": "golang.org/x/sys/windows/svc", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "lZi+t2ilFyYSpqL1ThwNf8ot3WQ=", "origin": "github.com/elastic/beats/vendor/golang.org/x/sys/windows/svc/debug", "path": "golang.org/x/sys/windows/svc/debug", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "uVlUSSKplihZG7N+QJ6fzDZ4Kh8=", @@ -2099,8 +2091,8 @@ "checksumSHA1": "fALlQNY1fM99NesfLJ50KguWsio=", "origin": "github.com/elastic/beats/vendor/gopkg.in/yaml.v2", "path": "gopkg.in/yaml.v2", - "revision": "a314e34ea823c39f0838be59de7f299831a8a018", - "revisionTime": "2018-07-20T09:04:15Z" + "revision": "9b268ba852ae93e1080c745a1031c72243f64df0", + "revisionTime": "2018-07-24T15:10:07Z" }, { "checksumSHA1": "tFDvoOebIC12z/m4GKPqrE7BrUM=",