Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[receiver/riak] - Fix issue where user configured metric settings were ignored #9561

Merged
merged 15 commits into from
May 2, 2022
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@
- `resourcedetectionprocessor`: Wire docker detector (#9372)
- `kafkametricsreceiver`: The kafkametricsreceiver was changed to connect to kafka during scrape, rather than startup. If kafka is unavailable the receiver will attempt to connect during subsequent scrapes until successful (#8817).
- `datadogexporter`: Update Kubernetes example manifest to new executable name. (#9425).
- `riakreceiver`: Fix issue where user configured metric settings were ignored. (#9561)
- `sqlserverreceiver`: Update `sqlserver.transaction_log.growth.count` and `sqlserver.transaction_log.shrink.count` to be monotonic sums. (#9522)

## v0.49.0
Expand Down
2 changes: 1 addition & 1 deletion receiver/riakreceiver/scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ func newScraper(logger *zap.Logger, cfg *Config, settings component.TelemetrySet
logger: logger,
cfg: cfg,
settings: settings,
mb: metadata.NewMetricsBuilder(metadata.DefaultMetricsSettings()),
mb: metadata.NewMetricsBuilder(cfg.Metrics),
}
}

Expand Down
60 changes: 58 additions & 2 deletions receiver/riakreceiver/scraper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ import (

"github.com/open-telemetry/opentelemetry-collector-contrib/internal/scrapertest"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/scrapertest/golden"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/riakreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/riakreceiver/internal/mocks"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/riakreceiver/internal/model"
)
Expand Down Expand Up @@ -58,6 +59,7 @@ func TestScraperStart(t *testing.T) {
},
expectError: true,
},

{
desc: "Valid Config",
scraper: &riakScraper{
Expand Down Expand Up @@ -90,6 +92,7 @@ func TestScaperScrape(t *testing.T) {
desc string
setupMockClient func(t *testing.T) client
expectedMetricGen func(t *testing.T) pmetric.Metrics
setupCfg func() *Config
expectedErr error
}{
{
Expand All @@ -100,6 +103,9 @@ func TestScaperScrape(t *testing.T) {
expectedMetricGen: func(t *testing.T) pmetric.Metrics {
return pmetric.NewMetrics()
},
setupCfg: func() *Config {
return createDefaultConfig().(*Config)
},
expectedErr: errClientNotInit,
},
{
Expand All @@ -112,8 +118,56 @@ func TestScaperScrape(t *testing.T) {
expectedMetricGen: func(t *testing.T) pmetric.Metrics {
return pmetric.NewMetrics()
},
setupCfg: func() *Config {
return createDefaultConfig().(*Config)
},
expectedErr: errors.New("some api error"),
},
{
desc: "Metrics Disabled",
setupMockClient: func(t *testing.T) client {
mockClient := mocks.MockClient{}
// use helper function from client tests
data := loadAPIResponseData(t, statsAPIResponseFile)
var stats *model.Stats
err := json.Unmarshal(data, &stats)
require.NoError(t, err)

mockClient.On("GetStats", mock.Anything).Return(stats, nil)
return &mockClient
},
expectedMetricGen: func(t *testing.T) pmetric.Metrics {
goldenPath := filepath.Join("testdata", "scraper", "expected_disabled.json")
expectedMetrics, err := golden.ReadMetrics(goldenPath)
require.NoError(t, err)
return expectedMetrics
},
setupCfg: func() *Config {
cfg := createDefaultConfig().(*Config)
cfg.Metrics = metadata.MetricsSettings{
RiakMemoryLimit: metadata.MetricSettings{
Enabled: false,
},
RiakNodeOperationCount: metadata.MetricSettings{
Enabled: false,
},
RiakNodeOperationTimeMean: metadata.MetricSettings{
Enabled: true,
},
RiakNodeReadRepairCount: metadata.MetricSettings{
Enabled: true,
},
RiakVnodeIndexOperationCount: metadata.MetricSettings{
Enabled: true,
},
RiakVnodeOperationCount: metadata.MetricSettings{
Enabled: true,
},
}
return cfg
},
expectedErr: nil,
},
{
desc: "Successful Collection",
setupMockClient: func(t *testing.T) client {
Expand All @@ -133,16 +187,18 @@ func TestScaperScrape(t *testing.T) {
require.NoError(t, err)
return expectedMetrics
},
setupCfg: func() *Config {
return createDefaultConfig().(*Config)
},
expectedErr: nil,
},
}

for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
scraper := newScraper(zap.NewNop(), createDefaultConfig().(*Config), componenttest.NewNopTelemetrySettings())
scraper := newScraper(zap.NewNop(), tc.setupCfg(), componenttest.NewNopTelemetrySettings())
scraper.client = tc.setupMockClient(t)
actualMetrics, err := scraper.scrape(context.Background())

if tc.expectedErr == nil {
require.NoError(t, err)
} else {
Expand Down
162 changes: 162 additions & 0 deletions receiver/riakreceiver/testdata/scraper/expected_disabled.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,162 @@
{
"resourceMetrics": [
{
"instrumentationLibraryMetrics": [
{
"instrumentationLibrary": {
"name": "otelcol/riakreceiver"
},
"metrics": [
{
"description": "The mean time between request and response for operations performed by the node over the last minute.",
"gauge": {
"dataPoints": [
{
"asInt": "4",
"attributes": [
{
"key": "request",
"value": {
"stringValue": "get"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
},
{
"asInt": "5",
"attributes": [
{
"key": "request",
"value": {
"stringValue": "put"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
}
]
},
"name": "riak.node.operation.time.mean",
"unit": "us"
},
{
"description": "The number of read repairs performed by the node.",
"name": "riak.node.read_repair.count",
"sum": {
"aggregationTemporality": "AGGREGATION_TEMPORALITY_CUMULATIVE",
"dataPoints": [
{
"asInt": "0",
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
}
],
"isMonotonic": true
},
"unit": "{read_repair}"
},
{
"description": "The number of index operations performed by vnodes on the node.",
"name": "riak.vnode.index.operation.count",
"sum": {
"aggregationTemporality": "AGGREGATION_TEMPORALITY_CUMULATIVE",
"dataPoints": [
{
"asInt": "10",
"attributes": [
{
"key": "operation",
"value": {
"stringValue": "read"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
},
{
"asInt": "11",
"attributes": [
{
"key": "operation",
"value": {
"stringValue": "write"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
},
{
"asInt": "9",
"attributes": [
{
"key": "operation",
"value": {
"stringValue": "delete"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
}
]
},
"unit": "{operation}"
},
{
"description": "The number of operations performed by vnodes on the node.",
"name": "riak.vnode.operation.count",
"sum": {
"aggregationTemporality": "AGGREGATION_TEMPORALITY_CUMULATIVE",
"dataPoints": [
{
"asInt": "7",
"attributes": [
{
"key": "request",
"value": {
"stringValue": "get"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
},
{
"asInt": "8",
"attributes": [
{
"key": "request",
"value": {
"stringValue": "put"
}
}
],
"startTimeUnixNano": "1648220661611816000",
"timeUnixNano": "1648220661612587000"
}
],
"isMonotonic": true
},
"unit": "{operation}"
}
]
}
],
"resource": {
"attributes": [
{
"key": "riak.node.name",
"value": {
"stringValue": "[email protected]"
}
}
]
}
}
]
}