Skip to content

Commit

Permalink
Revert "fix: Allow self signed certificates to ScraperTarget (#20047)"
Browse files Browse the repository at this point in the history
This reverts commit b337d42.
  • Loading branch information
danxmoran committed Nov 17, 2020
1 parent b337d42 commit 5faae5a
Show file tree
Hide file tree
Showing 6 changed files with 10 additions and 37 deletions.
1 change: 0 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
1. [19987](https://github.com/influxdata/influxdb/pull/19987): Fix various typos. Thanks @kumakichi!
1. [19991](https://github.com/influxdata/influxdb/pull/19991): Use --skip-verify flag for backup/restore CLI command.
1. [19995](https://github.com/influxdata/influxdb/pull/19995): Don't auto-print help on influxd errors
1. [20047](https://github.com/influxdata/influxdb/pull/20047): Allow scraper to ignore insecure certificates on a target. Thanks @cmackenzie1!

## v2.0.1 [2020-11-10]

Expand Down
25 changes: 2 additions & 23 deletions gather/prometheus.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package gather

import (
"context"
"crypto/tls"
"fmt"
"io"
"math"
Expand All @@ -18,31 +17,11 @@ import (

// prometheusScraper handles parsing Prometheus metrics.
// It implements the Scraper interface.
type prometheusScraper struct {
insecureHttp *http.Client
}

// newPrometheusScraper creates a new prometheusScraper.
func newPrometheusScraper() *prometheusScraper {
customTransport := http.DefaultTransport.(*http.Transport).Clone()
customTransport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
client := &http.Client{Transport: customTransport}

return &prometheusScraper{insecureHttp: client}
}
type prometheusScraper struct{}

// Gather parse metrics from a scraper target url.
func (p *prometheusScraper) Gather(ctx context.Context, target influxdb.ScraperTarget) (collected MetricsCollection, err error) {
var (
resp *http.Response
)

if target.AllowInsecure {
resp, err = p.insecureHttp.Get(target.URL)
} else {
resp, err = http.Get(target.URL)
}

resp, err := http.Get(target.URL)
if err != nil {
return collected, err
}
Expand Down
2 changes: 1 addition & 1 deletion gather/scheduler.go
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ func NewScheduler(

for i := 0; i < numScrapers; i++ {
err := s.Subscribe(promTargetSubject, "metrics", &handler{
Scraper: newPrometheusScraper(),
Scraper: new(prometheusScraper),
Publisher: p,
log: log,
})
Expand Down
2 changes: 1 addition & 1 deletion gather/scraper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ func TestPrometheusScraper(t *testing.T) {
},
}
for _, c := range cases {
scraper := newPrometheusScraper()
scraper := new(prometheusScraper)
var url string
if c.handler != nil {
ts := httptest.NewServer(c.handler)
Expand Down
4 changes: 0 additions & 4 deletions http/swagger.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10264,10 +10264,6 @@ components:
bucketID:
type: string
description: The ID of the bucket to write to.
allowInsecure:
type: boolean
description: Skip TLS verification on endpoint.
default: false
ScraperTargetResponse:
type: object
allOf:
Expand Down
13 changes: 6 additions & 7 deletions scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,12 @@ const (

// ScraperTarget is a target to scrape
type ScraperTarget struct {
ID ID `json:"id,omitempty"`
Name string `json:"name"`
Type ScraperType `json:"type"`
URL string `json:"url"`
OrgID ID `json:"orgID,omitempty"`
BucketID ID `json:"bucketID,omitempty"`
AllowInsecure bool `json:"allowInsecure,omitempty"`
ID ID `json:"id,omitempty"`
Name string `json:"name"`
Type ScraperType `json:"type"`
URL string `json:"url"`
OrgID ID `json:"orgID,omitempty"`
BucketID ID `json:"bucketID,omitempty"`
}

// ScraperTargetStoreService defines the crud service for ScraperTarget.
Expand Down

0 comments on commit 5faae5a

Please sign in to comment.