From acee375f1d35625e1198ba74cded23d32ee517a1 Mon Sep 17 00:00:00 2001 From: Ben Kochie Date: Tue, 19 May 2020 18:51:21 +0200 Subject: [PATCH] Add support for "show info" Add a new haproxy_version_info metric by running the "show info" command on the UNIX socket. This feature is not supported in HTTP mode. It will require some refactoring of the HTTP stats method to handle this. Signed-off-by: Ben Kochie --- haproxy_exporter.go | 75 ++++++++++++++++++++++++----- haproxy_exporter_test.go | 12 +++-- test/deadline.metrics | 2 +- test/invalid_config.metrics | 2 +- test/not_found.metrics | 2 +- test/older_haproxy_versions.metrics | 2 +- test/server_broken_csv.metrics | 2 +- test/server_without_checks.metrics | 2 +- test/unix_domain.metrics | 5 +- test/unix_domain_deadline.metrics | 2 +- test/unix_domain_not_found.metrics | 2 +- 11 files changed, 84 insertions(+), 24 deletions(-) diff --git a/haproxy_exporter.go b/haproxy_exporter.go index bac97ff..e71c0eb 100644 --- a/haproxy_exporter.go +++ b/haproxy_exporter.go @@ -14,6 +14,7 @@ package main import ( + "bufio" "crypto/tls" "encoding/csv" "errors" @@ -58,6 +59,9 @@ const ( ctimeMsField = 59 rtimeMsField = 60 ttimeMsField = 61 + + showStatCmd = "show stat\n" + showInfoCmd = "show info\n" ) var ( @@ -187,15 +191,17 @@ var ( 61: newBackendMetric("http_total_time_average_seconds", "Avg. HTTP total time for last 1024 successful connections.", nil), } - haproxyUp = prometheus.NewDesc(prometheus.BuildFQName(namespace, "", "up"), "Was the last scrape of haproxy successful.", nil, nil) + haproxyInfo = prometheus.NewDesc(prometheus.BuildFQName(namespace, "version", "info"), "HAProxy version info.", []string{"release_date", "version"}, nil) + haproxyUp = prometheus.NewDesc(prometheus.BuildFQName(namespace, "", "up"), "Was the last scrape of HAProxy successful.", nil, nil) ) // Exporter collects HAProxy stats from the given URI and exports them using // the prometheus metrics package. 
type Exporter struct { - URI string - mutex sync.RWMutex - fetch func() (io.ReadCloser, error) + URI string + mutex sync.RWMutex + fetchInfo func() (io.ReadCloser, error) + fetchStat func() (io.ReadCloser, error) up prometheus.Gauge totalScrapes, csvParseFailures prometheus.Counter @@ -210,19 +216,22 @@ func NewExporter(uri string, sslVerify bool, selectedServerMetrics map[int]*prom return nil, err } - var fetch func() (io.ReadCloser, error) + var fetchInfo func() (io.ReadCloser, error) + var fetchStat func() (io.ReadCloser, error) switch u.Scheme { case "http", "https", "file": - fetch = fetchHTTP(uri, sslVerify, timeout) + fetchStat = fetchHTTP(uri, sslVerify, timeout) case "unix": - fetch = fetchUnix(u, timeout) + fetchInfo = fetchUnix(u, showInfoCmd, timeout) + fetchStat = fetchUnix(u, showStatCmd, timeout) default: return nil, fmt.Errorf("unsupported scheme: %q", u.Scheme) } return &Exporter{ - URI: uri, - fetch: fetch, + URI: uri, + fetchInfo: fetchInfo, + fetchStat: fetchStat, up: prometheus.NewGauge(prometheus.GaugeOpts{ Namespace: namespace, Name: "up", @@ -255,6 +264,7 @@ func (e *Exporter) Describe(ch chan<- *prometheus.Desc) { for _, m := range e.serverMetrics { ch <- m } + ch <- haproxyInfo ch <- haproxyUp ch <- e.totalScrapes.Desc() ch <- e.csvParseFailures.Desc() @@ -293,7 +303,7 @@ func fetchHTTP(uri string, sslVerify bool, timeout time.Duration) func() (io.Rea } } -func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error) { +func fetchUnix(u *url.URL, cmd string, timeout time.Duration) func() (io.ReadCloser, error) { return func() (io.ReadCloser, error) { f, err := net.DialTimeout("unix", u.Path, timeout) if err != nil { @@ -303,7 +313,6 @@ func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error) f.Close() return nil, err } - cmd := "show stat\n" n, err := io.WriteString(f, cmd) if err != nil { f.Close() @@ -319,8 +328,25 @@ func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error) 
func (e *Exporter) scrape(ch chan<- prometheus.Metric) (up float64) { e.totalScrapes.Inc() + var err error + + if e.fetchInfo != nil { + infoReader, err := e.fetchInfo() + if err != nil { + level.Error(e.logger).Log("msg", "Can't scrape HAProxy", "err", err) + return 0 + } + defer infoReader.Close() + + info, err := e.parseInfo(infoReader) + if err != nil { + level.Debug(e.logger).Log("msg", "Failed parsing show info", "err", err) + } else { + ch <- prometheus.MustNewConstMetric(haproxyInfo, prometheus.GaugeValue, 1, info.ReleaseDate, info.Version) + } + } - body, err := e.fetch() + body, err := e.fetchStat() if err != nil { level.Error(e.logger).Log("msg", "Can't scrape HAProxy", "err", err) return 0 @@ -352,6 +378,31 @@ loop: return 1 } +type versionInfo struct { + ReleaseDate string + Version string +} + +func (e *Exporter) parseInfo(i io.Reader) (versionInfo, error) { + var version, releaseDate string + s := bufio.NewScanner(i) + for s.Scan() { + line := s.Text() + if !strings.Contains(line, ":") { + continue + } + + field := strings.SplitN(line, ": ", 2) + switch field[0] { + case "Release_date": + releaseDate = field[1] + case "Version": + version = field[1] + } + } + return versionInfo{ReleaseDate: releaseDate, Version: version}, s.Err() +} + func (e *Exporter) parseRow(csvRow []string, ch chan<- prometheus.Metric) { if len(csvRow) < minimumCsvFieldCount { level.Error(e.logger).Log("msg", "Parser received unexpected number of CSV fileds", "min", minimumCsvFieldCount, "received", len(csvRow)) diff --git a/haproxy_exporter_test.go b/haproxy_exporter_test.go index b3edb34..46fad5c 100644 --- a/haproxy_exporter_test.go +++ b/haproxy_exporter_test.go @@ -32,7 +32,10 @@ import ( "github.com/prometheus/client_golang/prometheus/testutil" ) -const testSocket = "/tmp/haproxyexportertest.sock" +const ( + testSocket = "/tmp/haproxyexportertest.sock" + testInfo = "Release_date: test date\nVersion: test version\n" +) type haproxy struct { *httptest.Server @@ -167,7 
+170,7 @@ func TestNotFound(t *testing.T) { expectMetrics(t, e, "not_found.metrics") } -func newHaproxyUnix(file, statsPayload string) (io.Closer, error) { +func newHaproxyUnix(file, statsPayload string, infoPayload string) (io.Closer, error) { if err := os.Remove(file); err != nil && !os.IsNotExist(err) { return nil, err } @@ -190,6 +193,9 @@ func newHaproxyUnix(file, statsPayload string) (io.Closer, error) { return } switch l { + case "show info\n": + c.Write([]byte(infoPayload)) + return case "show stat\n": c.Write([]byte(statsPayload)) return @@ -209,7 +215,7 @@ func TestUnixDomain(t *testing.T) { t.Skip("not on windows") return } - srv, err := newHaproxyUnix(testSocket, "test,127.0.0.1:8080,0,0,0,0,0,0,0,0,,0,,0,0,0,0,no check,1,1,0,0,,,0,,1,1,1,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,,,,,,,,,,,\n") + srv, err := newHaproxyUnix(testSocket, "test,127.0.0.1:8080,0,0,0,0,0,0,0,0,,0,,0,0,0,0,no check,1,1,0,0,,,0,,1,1,1,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,,,,,,,,,,,\n", testInfo) if err != nil { t.Fatalf("can't start test server: %v", err) } diff --git a/test/deadline.metrics b/test/deadline.metrics index bcf30ce..844de6a 100644 --- a/test/deadline.metrics +++ b/test/deadline.metrics @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0 # HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes. # TYPE haproxy_exporter_total_scrapes counter haproxy_exporter_total_scrapes 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 0 diff --git a/test/invalid_config.metrics b/test/invalid_config.metrics index 7ecbb81..45f92b1 100644 --- a/test/invalid_config.metrics +++ b/test/invalid_config.metrics @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 1 # HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes. # TYPE haproxy_exporter_total_scrapes counter haproxy_exporter_total_scrapes 1 -# HELP haproxy_up Was the last scrape of haproxy successful. 
+# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 1 diff --git a/test/not_found.metrics b/test/not_found.metrics index bcf30ce..844de6a 100644 --- a/test/not_found.metrics +++ b/test/not_found.metrics @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0 # HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes. # TYPE haproxy_exporter_total_scrapes counter haproxy_exporter_total_scrapes 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 0 diff --git a/test/older_haproxy_versions.metrics b/test/older_haproxy_versions.metrics index 6cf951b..b41ed9a 100644 --- a/test/older_haproxy_versions.metrics +++ b/test/older_haproxy_versions.metrics @@ -84,6 +84,6 @@ haproxy_server_up{backend="foo",server="foo-instance-0"} 1 haproxy_server_weight{backend="foo",server="BACKEND"} 1 haproxy_server_weight{backend="foo",server="FRONTEND"} 1 haproxy_server_weight{backend="foo",server="foo-instance-0"} 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 1 diff --git a/test/server_broken_csv.metrics b/test/server_broken_csv.metrics index 468f7ae..54f18fd 100644 --- a/test/server_broken_csv.metrics +++ b/test/server_broken_csv.metrics @@ -109,6 +109,6 @@ haproxy_server_up{backend="foo",server="foo-instance-0"} 1 haproxy_server_weight{backend="foo",server="BACKEND"} 1 haproxy_server_weight{backend="foo",server="FRONTEND"} 1 haproxy_server_weight{backend="foo",server="foo-instance-0"} 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. 
# TYPE haproxy_up gauge haproxy_up 1 diff --git a/test/server_without_checks.metrics b/test/server_without_checks.metrics index fccb2c5..aeed06d 100644 --- a/test/server_without_checks.metrics +++ b/test/server_without_checks.metrics @@ -75,6 +75,6 @@ haproxy_server_up{backend="test",server="127.0.0.1:8080"} 1 # HELP haproxy_server_weight Current weight of the server. # TYPE haproxy_server_weight gauge haproxy_server_weight{backend="test",server="127.0.0.1:8080"} 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 1 diff --git a/test/unix_domain.metrics b/test/unix_domain.metrics index fccb2c5..a962798 100644 --- a/test/unix_domain.metrics +++ b/test/unix_domain.metrics @@ -75,6 +75,9 @@ haproxy_server_up{backend="test",server="127.0.0.1:8080"} 1 # HELP haproxy_server_weight Current weight of the server. # TYPE haproxy_server_weight gauge haproxy_server_weight{backend="test",server="127.0.0.1:8080"} 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 1 +# HELP haproxy_version_info HAProxy version info. +# TYPE haproxy_version_info gauge +haproxy_version_info{release_date="test date",version="test version"} 1 diff --git a/test/unix_domain_deadline.metrics b/test/unix_domain_deadline.metrics index bcf30ce..844de6a 100644 --- a/test/unix_domain_deadline.metrics +++ b/test/unix_domain_deadline.metrics @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0 # HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes. # TYPE haproxy_exporter_total_scrapes counter haproxy_exporter_total_scrapes 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. 
# TYPE haproxy_up gauge haproxy_up 0 diff --git a/test/unix_domain_not_found.metrics b/test/unix_domain_not_found.metrics index bcf30ce..844de6a 100644 --- a/test/unix_domain_not_found.metrics +++ b/test/unix_domain_not_found.metrics @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0 # HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes. # TYPE haproxy_exporter_total_scrapes counter haproxy_exporter_total_scrapes 1 -# HELP haproxy_up Was the last scrape of haproxy successful. +# HELP haproxy_up Was the last scrape of HAProxy successful. # TYPE haproxy_up gauge haproxy_up 0