Skip to content

Commit

Permalink
Merge branch 'master' into bdu/multi-machine-tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Richard Artoul authored Dec 5, 2019
2 parents 6aa1208 + a585b99 commit 358a450
Show file tree
Hide file tree
Showing 166 changed files with 8,722 additions and 5,788 deletions.
2 changes: 1 addition & 1 deletion .ci
Submodule .ci updated 1 files
+5 −1 docker/build.sh
4 changes: 0 additions & 4 deletions .fossa.yml
Original file line number Diff line number Diff line change
Expand Up @@ -86,10 +86,6 @@ analyze:
type: go
target: github.com/m3db/m3/src/cmd/tools/read_index_ids/main
path: src/cmd/tools/read_index_ids/main
- name: github.com/m3db/m3/src/cmd/tools/verify_commitlogs/main
type: go
target: github.com/m3db/m3/src/cmd/tools/verify_commitlogs/main
path: src/cmd/tools/verify_commitlogs/main
- name: github.com/m3db/m3/src/cmd/tools/verify_index_files/main
type: go
target: github.com/m3db/m3/src/cmd/tools/verify_index_files/main
Expand Down
6 changes: 3 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ thrift_output_dir := generated/thrift/rpc
thrift_rules_dir := generated/thrift
vendor_prefix := vendor
cache_policy ?= recently_read
genny_target ?= genny-all

BUILD := $(abspath ./bin)
VENDOR := $(m3_package_path)/$(vendor_prefix)
Expand Down Expand Up @@ -88,7 +89,6 @@ TOOLS := \
read_index_files \
clone_fileset \
dtest \
verify_commitlogs \
verify_index_files \
carbon_load \
docs_test \
Expand Down Expand Up @@ -333,7 +333,7 @@ asset-gen-$(SUBDIR): install-tools
genny-gen-$(SUBDIR): install-tools
@echo "--- Generating genny files $(SUBDIR)"
@[ ! -f $(SELF_DIR)/src/$(SUBDIR)/generated-source-files.mk ] || \
PATH=$(combined_bin_paths):$(PATH) make -f $(SELF_DIR)/src/$(SUBDIR)/generated-source-files.mk genny-all
PATH=$(combined_bin_paths):$(PATH) make -f $(SELF_DIR)/src/$(SUBDIR)/generated-source-files.mk $(genny_target)
@PATH=$(combined_bin_paths):$(PATH) bash -c "source ./scripts/auto-gen-helpers.sh && gen_cleanup_dir '*_gen.go' $(SELF_DIR)/src/$(SUBDIR)/ && gen_cleanup_dir '*_gen_test.go' $(SELF_DIR)/src/$(SUBDIR)/"

.PHONY: license-gen-$(SUBDIR)
Expand Down Expand Up @@ -474,7 +474,7 @@ metalint: install-gometalinter install-linter-badtime install-linter-importorder
# Tests that all currently generated types match their contents if they were regenerated
.PHONY: test-all-gen
test-all-gen: all-gen
@test "$(shell git diff --exit-code --shortstat 2>/dev/null)" = "" || (git diff --text --exit-code && echo "Check git status, there are dirty files" && exit 1)
@test "$(shell git --no-pager diff --exit-code --shortstat 2>/dev/null)" = "" || (git --no-pager diff --text --exit-code && echo "Check git status, there are dirty files" && exit 1)
@test "$(shell git status --exit-code --porcelain 2>/dev/null | grep "^??")" = "" || (git status --exit-code --porcelain && echo "Check git status, there are untracked files" && exit 1)

# Runs a fossa license report
Expand Down
14 changes: 7 additions & 7 deletions docs/operational_guide/namespace_configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,16 +53,16 @@ curl -X POST <M3_COORDINATOR_IP_ADDRESS>:<CONFIGURED_PORT(default 7201)>/api/v1/
"snapshotEnabled": true,
"repairEnabled": false,
"retentionOptions": {
"retentionPeriodDuration": "2d",
"blockSizeDuration": "2h",
"bufferFutureDuration": "10m",
"bufferPastDuration": "10m",
"retentionPeriod": "2d",
"blockSize": "2h",
"bufferFuture": "10m",
"bufferPast": "10m",
"blockDataExpiry": true,
"blockDataExpiryAfterNotAccessPeriodDuration": "5m"
"blockDataExpiryAfterNotAccessedPeriod": "5m"
},
"indexOptions": {
"enabled": true,
"blockSizeDuration": "2h"
"blockSize": "2h"
}
}
}'
Expand Down Expand Up @@ -178,7 +178,7 @@ Whether to use the built-in indexing. Must be `true`.

Can be modified without creating a new namespace: `no`

#### blockSizeDuration
#### blockSize

The size of blocks (in duration) that the index uses.
Should match the databases [blocksize](#blocksize) for optimal memory usage.
Expand Down
1 change: 1 addition & 0 deletions mkdocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ pages:
- "Bootstrapping & Crash Recovery": "operational_guide/bootstrapping_crash_recovery.md"
- "Docker & Kernel Configuration": "operational_guide/kernel_configuration.md"
- "etcd": "operational_guide/etcd.md"
- "Monitoring": "operational_guide/monitoring.md"
- "Integrations":
- "Prometheus": "integrations/prometheus.md"
- "Graphite": "integrations/graphite.md"
Expand Down
6 changes: 3 additions & 3 deletions scripts/auto-gen-helpers.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ revert_copyright_only_change() {
# generated file will not contain the copyright notice and thus it will
# add in the copyright (with the new year).
local FILE=$0
numDiffLines=$(git diff --text -U0 $FILE | # Get file text diffs with no context.
grep -E -v '^\+\+\+|^---' | # Exclude file descriptors.
grep -E '^-|^\+' | # Get only line diffs.
numDiffLines=$(git --no-pager diff --text -U0 $FILE | # Get file text diffs with no context.
grep -E -v '^\+\+\+|^---' | # Exclude file descriptors.
grep -E '^-|^\+' | # Get only line diffs.
grep -Evc '^-// Copyright \(c\)|^\+// Copyright \(c\)') # Exclude copyrights and get the number of lines remaining.
if [ $numDiffLines = 0 ]; then
git checkout -- "$FILE" 2> /dev/null # Remove changes, since the only change was the copyright year.
Expand Down
225 changes: 225 additions & 0 deletions scripts/docker-integration-tests/query_fanout/restrict.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,225 @@
// Copyright (c) 2019 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

package main

import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net/http"
"os"
"runtime"

"github.com/m3db/m3/src/query/api/v1/handler"
"github.com/m3db/m3/src/query/api/v1/handler/prometheus"
"github.com/m3db/m3/src/query/models"

"github.com/stretchr/testify/require"
)

// main drives the restrict-by-tag integration scenarios: it reads the test
// timestamp from the -t flag, derives the metric name seeded by the shell
// harness (foo_<ts>), and runs every restriction scenario against both the
// instant and range query endpoints of the local coordinator.
func main() {
	var ts int
	// Fix: the usage string previously said "metric name to search", but the
	// flag actually carries the epoch timestamp the metric name derives from.
	flag.IntVar(&ts, "t", -1, "epoch timestamp used to derive the metric name")
	flag.Parse()

	require.True(t, ts > 0, "no timestamp supplied")
	name = fmt.Sprintf("foo_%d", ts)
	instant := fmt.Sprintf("http://0.0.0.0:7201/api/v1/query?query=%s", name)
	// Range window is aligned down to the nearest 100s and spans one step.
	rnge := fmt.Sprintf("http://0.0.0.0:7201/api/v1/query_range?query=%s"+
		"&start=%d&end=%d&step=100", name, ts/100*100, (ts/100+1)*100)

	for _, url := range []string{instant, rnge} {
		singleClusterDefaultStrip(url)
		bothClusterCustomStrip(url)
		bothClusterDefaultStrip(url)
		bothClusterNoStrip(url)
		bothClusterMultiStrip(url)
	}
}

// queryWithHeader issues a GET against url with the restrict-by-tags JSON
// header h attached, and decodes the Prometheus-format response body.
// It returns an error on request failure, a non-200 status, or a body that
// fails to decode.
func queryWithHeader(url string, h string) (prometheus.Response, error) {
	var result prometheus.Response
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return result, err
	}

	req.Header.Add(handler.RestrictByTagsJSONHeader, h)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return result, err
	}

	// Fix: defer the close before the status check — the original returned on
	// non-200 without ever closing the body, leaking the connection.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return result, fmt.Errorf("response failed with code %s", resp.Status)
	}

	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return result, err
	}

	// Fix: the unmarshal error was previously discarded, which could surface
	// as a confusing empty-response assertion failure downstream.
	if err := json.Unmarshal(data, &result); err != nil {
		return result, err
	}

	return result, nil
}

// mustMatcher builds a tag matcher of type t for tag name n and value v,
// panicking on construction failure (test helper).
// Fix: the original ignored all three arguments and always returned the
// hard-coded matcher EQUAL val=1, making the parameters dead.
func mustMatcher(t models.MatchType, n string, v string) models.Matcher {
	m, err := models.NewMatcher(t, []byte(n), []byte(v))
	if err != nil {
		panic(err)
	}

	return m
}

// tester is a minimal require.TestingT implementation so testify's require
// helpers can be used from a plain `go run` binary (outside of `go test`):
// failures print to stdout and abort the process via FailNow.
type tester struct{}

// Ensure tester is a TestingT and set a global `t` for the scenario
// functions below to pass to require.* assertions.
var t require.TestingT = &tester{}

// name is the metric under test (foo_<timestamp>); global and set on startup.
var name string

// Errorf implements require.TestingT by printing the assertion failure,
// decorating the message argument with the call site of the failing assert.
// The caller depth of 4 targets the frame of the require.* call —
// NOTE(review): depth is empirically tuned to testify's internals; confirm
// if the testify version changes.
func (t *tester) Errorf(format string, args ...interface{}) {
	_, fn, line, _ := runtime.Caller(4)
	// Fix: guard the index — not every testify failure supplies three args,
	// and indexing blindly would panic inside the error reporter itself.
	if len(args) > 2 {
		args[2] = fmt.Sprintf(" at %s:%d:\n%v", fn, line, args[2])
	}
	fmt.Printf(format, args...)
}

// FailNow implements require.TestingT by terminating the whole process:
// the first failed require.* assertion aborts the integration run with a
// non-zero exit code, which the calling shell script treats as failure.
func (t *tester) FailNow() {
	os.Exit(1)
}

// mustParseOpts serializes the given restrict-by-tag options to their JSON
// string form, failing the run if they cannot be marshalled.
func mustParseOpts(o handler.StringTagOptions) string {
	encoded, err := json.Marshal(o)
	require.NoError(t, err, "cannot marshal to json")
	return string(encoded)
}

// bothClusterDefaultStrip restricts on val=1 with the default strip
// behavior: both clusters match, the restricted tag is stripped, and the
// metric name tag is kept.
func bothClusterDefaultStrip(url string) {
	opts := mustParseOpts(handler.StringTagOptions{
		Restrict: []handler.StringMatch{
			{Name: "val", Type: "EQUAL", Value: "1"},
		},
	})

	resp, err := queryWithHeader(url, opts)
	require.NoError(t, err, "failed to query")

	series := resp.Data.Result
	series.Sort()
	require.Equal(t, len(series), 2)
	expectedClusters := []string{"coordinator-cluster-a", "coordinator-cluster-b"}
	for i, s := range series {
		// Two tags remain per series: __name__ and cluster.
		require.Equal(t, 2, len(s.Metric))
		require.Equal(t, name, s.Metric["__name__"])
		require.Equal(t, expectedClusters[i], s.Metric["cluster"])
	}
}

// bothClusterCustomStrip restricts on val=1 while explicitly stripping the
// metric name tag: both clusters match and each series carries only the
// cluster and val tags.
func bothClusterCustomStrip(url string) {
	opts := mustParseOpts(handler.StringTagOptions{
		Restrict: []handler.StringMatch{
			{Name: "val", Type: "EQUAL", Value: "1"},
		},
		Strip: []string{"__name__"},
	})

	resp, err := queryWithHeader(url, opts)
	require.NoError(t, err, "failed to query")

	series := resp.Data.Result
	series.Sort()
	require.Equal(t, len(series), 2)
	expectedClusters := []string{"coordinator-cluster-a", "coordinator-cluster-b"}
	for i, s := range series {
		// Two tags remain per series: cluster and val.
		require.Equal(t, 2, len(s.Metric))
		require.Equal(t, expectedClusters[i], s.Metric["cluster"])
		require.Equal(t, "1", s.Metric["val"])
	}
}

// bothClusterNoStrip restricts on val=1 with an explicitly empty strip list:
// both clusters match and every tag — __name__, cluster and val — survives.
func bothClusterNoStrip(url string) {
	opts := mustParseOpts(handler.StringTagOptions{
		Restrict: []handler.StringMatch{
			{Name: "val", Type: "EQUAL", Value: "1"},
		},
		Strip: []string{},
	})

	resp, err := queryWithHeader(url, opts)
	require.NoError(t, err, "failed to query")

	series := resp.Data.Result
	series.Sort()
	require.Equal(t, len(series), 2)
	expectedClusters := []string{"coordinator-cluster-a", "coordinator-cluster-b"}
	for i, s := range series {
		// All three tags remain per series.
		require.Equal(t, 3, len(s.Metric))
		require.Equal(t, name, s.Metric["__name__"])
		require.Equal(t, expectedClusters[i], s.Metric["cluster"])
		require.Equal(t, "1", s.Metric["val"])
	}
}

// bothClusterMultiStrip restricts on val=1 and strips both the restricted
// tag and the metric name: both clusters match and each series is left with
// the cluster tag alone.
func bothClusterMultiStrip(url string) {
	opts := mustParseOpts(handler.StringTagOptions{
		Restrict: []handler.StringMatch{
			{Name: "val", Type: "EQUAL", Value: "1"},
		},
		Strip: []string{"val", "__name__"},
	})

	resp, err := queryWithHeader(url, opts)
	require.NoError(t, err, "failed to query")

	series := resp.Data.Result
	series.Sort()
	require.Equal(t, len(series), 2)
	expectedClusters := []string{"coordinator-cluster-a", "coordinator-cluster-b"}
	for i, s := range series {
		// Only the cluster tag remains per series.
		require.Equal(t, 1, len(s.Metric))
		require.Equal(t, expectedClusters[i], s.Metric["cluster"])
	}
}

// singleClusterDefaultStrip restricts on val=9 with default stripping.
// NB: cluster 1 is expected to have metrics with vals in range: [1,5]
// and cluster 2 is expected to have metrics with vals in range: [1,10],
// so a value in (5..10] should hit exactly one metric, on cluster b only.
func singleClusterDefaultStrip(url string) {
	opts := mustParseOpts(handler.StringTagOptions{
		Restrict: []handler.StringMatch{
			{Name: "val", Type: "EQUAL", Value: "9"},
		},
	})

	resp, err := queryWithHeader(url, opts)
	require.NoError(t, err, "failed to query")

	series := resp.Data.Result
	require.Equal(t, len(series), 1, url)
	// Default strip removes val, leaving __name__ and cluster.
	require.Equal(t, 2, len(series[0].Metric))
	require.Equal(t, name, series[0].Metric["__name__"], "single")
	require.Equal(t, "coordinator-cluster-b", series[0].Metric["cluster"])
}
19 changes: 19 additions & 0 deletions scripts/docker-integration-tests/query_fanout/restrict.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

set -ex
TEST_PATH=$GOPATH/src/github.com/m3db/m3/scripts/docker-integration-tests
FANOUT_PATH=$TEST_PATH/query_fanout
source $TEST_PATH/common.sh
source $FANOUT_PATH/warning.sh

# Runs the restrict-by-tag integration scenarios: seeds both coordinator
# clusters with metrics derived from the current timestamp, then executes
# the Go driver (restrict.go) against them.
function test_restrictions {
  t=$(date +%s)
  # NOTE(review): METRIC_NAME is assigned but not referenced in this
  # function — presumably read by write_metrics (sourced from warning.sh);
  # confirm against that script.
  METRIC_NAME="foo_$t"
  # write 5 metrics to cluster a
  write_metrics coordinator-cluster-a 5
  # write 10 metrics to cluster b
  write_metrics coordinator-cluster-b 10

  # run the restriction scenarios against the metrics written above,
  # retrying to allow the writes to become queryable
  ATTEMPTS=3 TIMEOUT=1 retry_with_backoff go run $FANOUT_PATH/restrict.go -t $t
}
10 changes: 8 additions & 2 deletions scripts/docker-integration-tests/query_fanout/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,11 @@

set -xe

source $GOPATH/src/github.com/m3db/m3/scripts/docker-integration-tests/common.sh
source $GOPATH/src/github.com/m3db/m3/scripts/docker-integration-tests/query_fanout/warning.sh
TEST_PATH=$GOPATH/src/github.com/m3db/m3/scripts/docker-integration-tests
FANOUT_PATH=$TEST_PATH/query_fanout
source $TEST_PATH/common.sh
source $FANOUT_PATH/warning.sh
source $FANOUT_PATH/restrict.sh

REVISION=$(git rev-parse HEAD)
COMPOSE_FILE=$GOPATH/src/github.com/m3db/m3/scripts/docker-integration-tests/query_fanout/docker-compose.yml
Expand Down Expand Up @@ -216,3 +219,6 @@ ATTEMPTS=5 TIMEOUT=1 retry_with_backoff complete_tags

echo "running fanout warning tests"
test_fanout_warnings

echo "running restrict tests"
test_restrictions
2 changes: 2 additions & 0 deletions scripts/docker-integration-tests/query_fanout/warning.sh
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,8 @@ function test_fanout_warning_missing_zone {

ATTEMPTS=3 TIMEOUT=1 retry_with_backoff find_carbon 16 remote_store_cluster-c_complete_tags_warning
ATTEMPTS=3 TIMEOUT=1 retry_with_backoff find_carbon 9 max_fetch_series_limit_applied,remote_store_cluster-c_complete_tags_warning

docker-compose -f ${COMPOSE_FILE} start coordinator-cluster-c
}

function test_fanout_warnings {
Expand Down
2 changes: 1 addition & 1 deletion src/aggregator/aggregator/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ var (
defaultCounterPrefix = []byte("counts.")
defaultTimerPrefix = []byte("timers.")
defaultGaugePrefix = []byte("gauges.")
defaultEntryTTL = 24 * time.Hour
defaultEntryTTL = time.Hour
defaultEntryCheckInterval = time.Hour
defaultEntryCheckBatchPercent = 0.01
defaultMaxTimerBatchSizePerWrite = 0
Expand Down
Loading

0 comments on commit 358a450

Please sign in to comment.