Skip to content

Commit

Permalink
[receiver/prometheus] include scrape configs provided via `scrape_config_files` (#34897)
Browse files Browse the repository at this point in the history

**Description:** This PR fixes a bug in the prometheus receiver where
the scrape configs provided via `scrape_config_files` were not applied.
Using `scrape_config_files` instead of providing the scrape configs
directly does come with some limitations regarding the use of env vars,
as also mentioned in
#34786 (comment).

**Link to tracking Issue:** #34786

**Testing:** Added unit tests

---------

Signed-off-by: Florian Bacher <[email protected]>
Co-authored-by: Antoine Toulme <[email protected]>
Co-authored-by: David Ashpole <[email protected]>
  • Loading branch information
3 people authored Sep 11, 2024
1 parent c4420c3 commit c839ca6
Show file tree
Hide file tree
Showing 7 changed files with 144 additions and 3 deletions.
27 changes: 27 additions & 0 deletions .chloggen/prometheus-receiver-scrape-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: bug_fix

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: prometheusreceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Fix the retrieval of scrape configurations by also considering scrape config files

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [34786]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
21 changes: 19 additions & 2 deletions receiver/prometheusreceiver/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,24 @@ type Config struct {

// Validate checks the receiver configuration is valid.
func (cfg *Config) Validate() error {
if (cfg.PrometheusConfig == nil || len(cfg.PrometheusConfig.ScrapeConfigs) == 0) && cfg.TargetAllocator == nil {
if !containsScrapeConfig(cfg) && cfg.TargetAllocator == nil {
return errors.New("no Prometheus scrape_configs or target_allocator set")
}
return nil
}

// containsScrapeConfig reports whether the configuration defines at least one
// scrape config, considering both inline scrape_configs and any referenced
// scrape_config_files.
func containsScrapeConfig(cfg *Config) bool {
	if cfg.PrometheusConfig == nil {
		return false
	}
	// GetScrapeConfigs merges inline scrape configs with those loaded from
	// scrape_config_files; a load/parse error counts as "no scrape configs".
	scrapeConfigs, err := (*promconfig.Config)(cfg.PrometheusConfig).GetScrapeConfigs()
	if err != nil {
		return false
	}
	return len(scrapeConfigs) > 0
}

// PromConfig is a redeclaration of promconfig.Config because we need custom unmarshaling
// as prometheus "config" uses `yaml` tags.
type PromConfig promconfig.Config
Expand Down Expand Up @@ -87,7 +99,12 @@ func (cfg *PromConfig) Validate() error {
return fmt.Errorf("unsupported features:\n\t%s", strings.Join(unsupportedFeatures, "\n\t"))
}

for _, sc := range cfg.ScrapeConfigs {
scrapeConfigs, err := (*promconfig.Config)(cfg).GetScrapeConfigs()
if err != nil {
return err
}

for _, sc := range scrapeConfigs {
if err := validateHTTPClientConfig(&sc.HTTPClientConfig); err != nil {
return err
}
Expand Down
13 changes: 13 additions & 0 deletions receiver/prometheusreceiver/config_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,19 @@ func TestLoadTargetAllocatorConfig(t *testing.T) {
assert.Equal(t, promModel.Duration(5*time.Second), r2.PrometheusConfig.ScrapeConfigs[0].ScrapeInterval)
}

// TestValidateConfigWithScrapeConfigFiles verifies that a receiver config that
// provides its scrape configs only via scrape_config_files passes validation.
func TestValidateConfigWithScrapeConfigFiles(t *testing.T) {
	cm, err := confmaptest.LoadConf(filepath.Join("testdata", "config_scrape_config_files.yaml"))
	require.NoError(t, err)

	cfg := NewFactory().CreateDefaultConfig()
	sub, err := cm.Sub(component.NewIDWithName(metadata.Type, "").String())
	require.NoError(t, err)
	require.NoError(t, sub.Unmarshal(cfg))

	require.NoError(t, component.ValidateConfig(cfg))
}

func TestLoadConfigFailsOnUnknownSection(t *testing.T) {
cm, err := confmaptest.LoadConf(filepath.Join("testdata", "invalid-config-section.yaml"))
require.NoError(t, err)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
// Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0

package prometheusreceiver

import (
	"fmt"
	"os"
	"path/filepath"
	"testing"

	"github.com/prometheus/prometheus/config"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"go.opentelemetry.io/collector/pdata/pmetric"
	semconv "go.opentelemetry.io/collector/semconv/v1.27.0"
	"gopkg.in/yaml.v2"
)

// scrapeFileTargetPage is the metrics payload served by the mock Prometheus
// target; it exposes a single counter sample named "foo1".
var scrapeFileTargetPage = `
# A simple counter
# TYPE foo counter
foo1 1
`

// TestScrapeConfigFiles validates that scrape configs provided via
// scrape_config_files are also considered and added to the applied
// configuration.
func TestScrapeConfigFiles(t *testing.T) {
	setMetricsTimestamp()
	targets := []*testData{
		{
			name: "target1",
			pages: []mockPrometheusResponse{
				{code: 200, data: scrapeFileTargetPage},
			},
			validateFunc: verifyScrapeConfigFiles,
		},
	}

	testComponent(t, targets, func(cfg *Config) {
		// Take the generated scrape config and move it into a file instead,
		// forcing the receiver to load it via scrape_config_files.
		marshalledScrapeConfigs, err := yaml.Marshal(cfg.PrometheusConfig.ScrapeConfigs)
		require.NoError(t, err)
		// filepath.Join builds an OS-correct path; t.TempDir is removed
		// automatically when the test finishes.
		cfgFileName := filepath.Join(t.TempDir(), "test-scrape-config.yaml")
		scrapeConfigFileContent := fmt.Sprintf("scrape_configs:\n%s", string(marshalledScrapeConfigs))
		// 0400 is sufficient: the receiver only needs to read the file.
		err = os.WriteFile(cfgFileName, []byte(scrapeConfigFileContent), 0400)
		require.NoError(t, err)
		cfg.PrometheusConfig.ScrapeConfigs = []*config.ScrapeConfig{}
		cfg.PrometheusConfig.ScrapeConfigFiles = []string{cfgFileName}
	})
}

// verifyScrapeConfigFiles asserts that the scraped result carries the expected
// service name and contains the "foo1" counter defined in the scrape target
// page served from the scrape_config_files-based configuration.
func verifyScrapeConfigFiles(t *testing.T, _ *testData, result []pmetric.ResourceMetrics) {
	require.Len(t, result, 1)

	serviceName, ok := result[0].Resource().Attributes().Get(semconv.AttributeServiceName)
	assert.True(t, ok)
	assert.Equal(t, "target1", serviceName.Str())

	metrics := result[0].ScopeMetrics().At(0).Metrics()
	assert.Equal(t, 6, metrics.Len())

	metricFound := false
	for i := 0; i < metrics.Len(); i++ {
		if metrics.At(i).Name() == "foo1" {
			metricFound = true
			break
		}
	}
	assert.True(t, metricFound)
}
6 changes: 5 additions & 1 deletion receiver/prometheusreceiver/targetallocator/manager.go
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,10 @@ func (m *Manager) sync(compareHash uint64, httpClient *http.Client) (uint64, err
}

func (m *Manager) applyCfg() error {
scrapeConfigs, err := m.promCfg.GetScrapeConfigs()
if err != nil {
return fmt.Errorf("could not get scrape configs: %w", err)
}
if !m.enableNativeHistograms {
// Enforce scraping classic histograms to avoid dropping them.
for _, scrapeConfig := range m.promCfg.ScrapeConfigs {
Expand All @@ -170,7 +174,7 @@ func (m *Manager) applyCfg() error {
}

discoveryCfg := make(map[string]discovery.Configs)
for _, scrapeConfig := range m.promCfg.ScrapeConfigs {
for _, scrapeConfig := range scrapeConfigs {
discoveryCfg[scrapeConfig.JobName] = scrapeConfig.ServiceDiscoveryConfigs
m.settings.Logger.Info("Scrape job added", zap.String("jobName", scrapeConfig.JobName))
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
prometheus:
trim_metric_suffixes: true
use_start_time_metric: true
start_time_metric_regex: '^(.+_)*process_start_time_seconds$'
report_extra_scrape_metrics: true
config:
scrape_config_files:
- ./testdata/scrape-config.yaml
3 changes: 3 additions & 0 deletions receiver/prometheusreceiver/testdata/scrape-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
scrape_configs:
- job_name: 'demo'
scrape_interval: 5s

0 comments on commit c839ca6

Please sign in to comment.