Skip to content

Commit

Permalink
fix: Allow self-signed certificates for ScraperTarget (#20047)
Browse files Browse the repository at this point in the history
Co-authored-by: Daniel Moran <[email protected]>
  • Loading branch information
cmackenzie1 and danxmoran committed Nov 17, 2020
1 parent 83a86cb commit 6536154
Show file tree
Hide file tree
Showing 6 changed files with 37 additions and 10 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
1. [20012](https://github.com/influxdata/influxdb/pull/20012): Validate input paths to `influxd upgrade` up-front
1. [20017](https://github.com/influxdata/influxdb/pull/20017): Don't include duplicates for SHOW DATABASES
1. [20064](https://github.com/influxdata/influxdb/pull/20064): Ensure Flux reads across all shards.
1. [20047](https://github.com/influxdata/influxdb/pull/20047): Allow scraper to ignore insecure certificates on a target. Thanks @cmackenzie1!

## v2.0.1 [2020-11-10]

Expand Down
25 changes: 23 additions & 2 deletions gather/prometheus.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package gather

import (
"context"
"crypto/tls"
"fmt"
"io"
"math"
Expand All @@ -17,11 +18,31 @@ import (

// prometheusScraper handles parsing prometheus metrics.
// implements Scraper interfaces.
type prometheusScraper struct {
	// insecureHttp is an HTTP client whose TLS config sets
	// InsecureSkipVerify; Gather uses it for targets whose
	// AllowInsecure flag is set.
	insecureHttp *http.Client
}

// newPrometheusScraper creates a prometheusScraper. Its insecureHttp
// client clones the default transport but skips TLS certificate
// verification, for scraper targets that allow insecure endpoints.
func newPrometheusScraper() *prometheusScraper {
	transport := http.DefaultTransport.(*http.Transport).Clone()
	transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}

	return &prometheusScraper{
		insecureHttp: &http.Client{Transport: transport},
	}
}

// Gather parse metrics from a scraper target url.
func (p *prometheusScraper) Gather(ctx context.Context, target influxdb.ScraperTarget) (collected MetricsCollection, err error) {
resp, err := http.Get(target.URL)
var (
resp *http.Response
)

if target.AllowInsecure {
resp, err = p.insecureHttp.Get(target.URL)
} else {
resp, err = http.Get(target.URL)
}

if err != nil {
return collected, err
}
Expand Down
2 changes: 1 addition & 1 deletion gather/scheduler.go
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ func NewScheduler(

for i := 0; i < numScrapers; i++ {
err := s.Subscribe(promTargetSubject, "metrics", &handler{
Scraper: new(prometheusScraper),
Scraper: newPrometheusScraper(),
Publisher: p,
log: log,
})
Expand Down
2 changes: 1 addition & 1 deletion gather/scraper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ func TestPrometheusScraper(t *testing.T) {
},
}
for _, c := range cases {
scraper := new(prometheusScraper)
scraper := newPrometheusScraper()
var url string
if c.handler != nil {
ts := httptest.NewServer(c.handler)
Expand Down
4 changes: 4 additions & 0 deletions http/swagger.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10264,6 +10264,10 @@ components:
bucketID:
type: string
description: The ID of the bucket to write to.
allowInsecure:
type: boolean
description: Skip TLS verification on the endpoint.
default: false
ScraperTargetResponse:
type: object
allOf:
Expand Down
13 changes: 7 additions & 6 deletions scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,13 @@ const (

// ScraperTarget is a target to scrape.
type ScraperTarget struct {
	// ID uniquely identifies the scraper target.
	ID ID `json:"id,omitempty"`
	// Name is the name of the target.
	Name string `json:"name"`
	// Type selects the scraper implementation used for this target.
	Type ScraperType `json:"type"`
	// URL is the endpoint to scrape metrics from.
	URL string `json:"url"`
	// OrgID is the organization that owns this target.
	OrgID ID `json:"orgID,omitempty"`
	// BucketID is the ID of the bucket scraped metrics are written to.
	BucketID ID `json:"bucketID,omitempty"`
	// AllowInsecure skips TLS certificate verification when
	// scraping this target.
	AllowInsecure bool `json:"allowInsecure,omitempty"`
}

// ScraperTargetStoreService defines the crud service for ScraperTarget.
Expand Down

0 comments on commit 6536154

Please sign in to comment.