Skip to content
This repository has been archived by the owner on Mar 8, 2023. It is now read-only.

Add support for "show info" #180

Merged
merged 1 commit into from
May 24, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 63 additions & 12 deletions haproxy_exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
package main

import (
"bufio"
"crypto/tls"
"encoding/csv"
"errors"
Expand Down Expand Up @@ -58,6 +59,9 @@ const (
ctimeMsField = 59
rtimeMsField = 60
ttimeMsField = 61

showStatCmd = "show stat\n"
showInfoCmd = "show info\n"
)

var (
Expand Down Expand Up @@ -187,15 +191,17 @@ var (
61: newBackendMetric("http_total_time_average_seconds", "Avg. HTTP total time for last 1024 successful connections.", nil),
}

haproxyUp = prometheus.NewDesc(prometheus.BuildFQName(namespace, "", "up"), "Was the last scrape of haproxy successful.", nil, nil)
haproxyInfo = prometheus.NewDesc(prometheus.BuildFQName(namespace, "version", "info"), "HAProxy version info.", []string{"release_date", "version"}, nil)
haproxyUp = prometheus.NewDesc(prometheus.BuildFQName(namespace, "", "up"), "Was the last scrape of HAProxy successful.", nil, nil)
)

// Exporter collects HAProxy stats from the given URI and exports them using
// the prometheus metrics package.
type Exporter struct {
URI string
mutex sync.RWMutex
fetch func() (io.ReadCloser, error)
URI string
mutex sync.RWMutex
fetchInfo func() (io.ReadCloser, error)
fetchStat func() (io.ReadCloser, error)

up prometheus.Gauge
totalScrapes, csvParseFailures prometheus.Counter
Expand All @@ -210,19 +216,22 @@ func NewExporter(uri string, sslVerify bool, selectedServerMetrics map[int]*prom
return nil, err
}

var fetch func() (io.ReadCloser, error)
var fetchInfo func() (io.ReadCloser, error)
var fetchStat func() (io.ReadCloser, error)
switch u.Scheme {
case "http", "https", "file":
fetch = fetchHTTP(uri, sslVerify, timeout)
fetchStat = fetchHTTP(uri, sslVerify, timeout)
case "unix":
fetch = fetchUnix(u, timeout)
fetchInfo = fetchUnix(u, showInfoCmd, timeout)
fetchStat = fetchUnix(u, showStatCmd, timeout)
default:
return nil, fmt.Errorf("unsupported scheme: %q", u.Scheme)
}

return &Exporter{
URI: uri,
fetch: fetch,
URI: uri,
fetchInfo: fetchInfo,
fetchStat: fetchStat,
up: prometheus.NewGauge(prometheus.GaugeOpts{
Namespace: namespace,
Name: "up",
Expand Down Expand Up @@ -255,6 +264,7 @@ func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
for _, m := range e.serverMetrics {
ch <- m
}
ch <- haproxyInfo
ch <- haproxyUp
ch <- e.totalScrapes.Desc()
ch <- e.csvParseFailures.Desc()
Expand Down Expand Up @@ -293,7 +303,7 @@ func fetchHTTP(uri string, sslVerify bool, timeout time.Duration) func() (io.Rea
}
}

func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error) {
func fetchUnix(u *url.URL, cmd string, timeout time.Duration) func() (io.ReadCloser, error) {
return func() (io.ReadCloser, error) {
f, err := net.DialTimeout("unix", u.Path, timeout)
if err != nil {
Expand All @@ -303,7 +313,6 @@ func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error)
f.Close()
return nil, err
}
cmd := "show stat\n"
n, err := io.WriteString(f, cmd)
if err != nil {
f.Close()
Expand All @@ -319,8 +328,25 @@ func fetchUnix(u *url.URL, timeout time.Duration) func() (io.ReadCloser, error)

func (e *Exporter) scrape(ch chan<- prometheus.Metric) (up float64) {
e.totalScrapes.Inc()
var err error

if e.fetchInfo != nil {
infoReader, err := e.fetchInfo()
if err != nil {
level.Error(e.logger).Log("msg", "Can't scrape HAProxy", "err", err)
return 0
}
defer infoReader.Close()

info, err := e.parseInfo(infoReader)
if err != nil {
level.Debug(e.logger).Log("msg", "Faild parsing show info", "err", err)
} else {
ch <- prometheus.MustNewConstMetric(haproxyInfo, prometheus.GaugeValue, 1, info.ReleaseDate, info.Version)
}
}

body, err := e.fetch()
body, err := e.fetchStat()
if err != nil {
level.Error(e.logger).Log("msg", "Can't scrape HAProxy", "err", err)
return 0
Expand Down Expand Up @@ -352,6 +378,31 @@ loop:
return 1
}

// versionInfo holds the HAProxy build metadata extracted from the
// "show info" command output: the "Version" and "Release_date" fields.
// Both values are kept as the raw strings reported by HAProxy.
type versionInfo struct {
	ReleaseDate string
	Version     string
}

func (e *Exporter) parseInfo(i io.Reader) (versionInfo, error) {
var version, releaseDate string
s := bufio.NewScanner(i)
for s.Scan() {
SuperQ marked this conversation as resolved.
Show resolved Hide resolved
line := s.Text()
if !strings.Contains(line, ":") {
continue
}

field := strings.SplitN(line, ": ", 2)
switch field[0] {
case "Release_date":
releaseDate = field[1]
case "Version":
version = field[1]
}
}
return versionInfo{ReleaseDate: releaseDate, Version: version}, s.Err()
}

func (e *Exporter) parseRow(csvRow []string, ch chan<- prometheus.Metric) {
if len(csvRow) < minimumCsvFieldCount {
level.Error(e.logger).Log("msg", "Parser received unexpected number of CSV fileds", "min", minimumCsvFieldCount, "received", len(csvRow))
Expand Down
12 changes: 9 additions & 3 deletions haproxy_exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,10 @@ import (
"github.com/prometheus/client_golang/prometheus/testutil"
)

const testSocket = "/tmp/haproxyexportertest.sock"
const (
testSocket = "/tmp/haproxyexportertest.sock"
testInfo = "Release_date: test date\nVersion: test version\n"
)

type haproxy struct {
*httptest.Server
Expand Down Expand Up @@ -167,7 +170,7 @@ func TestNotFound(t *testing.T) {
expectMetrics(t, e, "not_found.metrics")
}

func newHaproxyUnix(file, statsPayload string) (io.Closer, error) {
func newHaproxyUnix(file, statsPayload string, infoPayload string) (io.Closer, error) {
if err := os.Remove(file); err != nil && !os.IsNotExist(err) {
return nil, err
}
Expand All @@ -190,6 +193,9 @@ func newHaproxyUnix(file, statsPayload string) (io.Closer, error) {
return
}
switch l {
case "show info\n":
c.Write([]byte(infoPayload))
return
case "show stat\n":
c.Write([]byte(statsPayload))
return
Expand All @@ -209,7 +215,7 @@ func TestUnixDomain(t *testing.T) {
t.Skip("not on windows")
return
}
srv, err := newHaproxyUnix(testSocket, "test,127.0.0.1:8080,0,0,0,0,0,0,0,0,,0,,0,0,0,0,no check,1,1,0,0,,,0,,1,1,1,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,,,,,,,,,,,\n")
srv, err := newHaproxyUnix(testSocket, "test,127.0.0.1:8080,0,0,0,0,0,0,0,0,,0,,0,0,0,0,no check,1,1,0,0,,,0,,1,1,1,,0,,2,0,,0,,,,0,0,0,0,0,0,0,,,,0,0,,,,,,,,,,,\n", testInfo)
if err != nil {
t.Fatalf("can't start test server: %v", err)
}
Expand Down
2 changes: 1 addition & 1 deletion test/deadline.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0
# HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes.
# TYPE haproxy_exporter_total_scrapes counter
haproxy_exporter_total_scrapes 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 0
2 changes: 1 addition & 1 deletion test/invalid_config.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 1
# HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes.
# TYPE haproxy_exporter_total_scrapes counter
haproxy_exporter_total_scrapes 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 1
2 changes: 1 addition & 1 deletion test/not_found.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0
# HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes.
# TYPE haproxy_exporter_total_scrapes counter
haproxy_exporter_total_scrapes 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 0
2 changes: 1 addition & 1 deletion test/older_haproxy_versions.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,6 @@ haproxy_server_up{backend="foo",server="foo-instance-0"} 1
haproxy_server_weight{backend="foo",server="BACKEND"} 1
haproxy_server_weight{backend="foo",server="FRONTEND"} 1
haproxy_server_weight{backend="foo",server="foo-instance-0"} 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 1
2 changes: 1 addition & 1 deletion test/server_broken_csv.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,6 @@ haproxy_server_up{backend="foo",server="foo-instance-0"} 1
haproxy_server_weight{backend="foo",server="BACKEND"} 1
haproxy_server_weight{backend="foo",server="FRONTEND"} 1
haproxy_server_weight{backend="foo",server="foo-instance-0"} 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 1
2 changes: 1 addition & 1 deletion test/server_without_checks.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,6 @@ haproxy_server_up{backend="test",server="127.0.0.1:8080"} 1
# HELP haproxy_server_weight Current weight of the server.
# TYPE haproxy_server_weight gauge
haproxy_server_weight{backend="test",server="127.0.0.1:8080"} 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 1
5 changes: 4 additions & 1 deletion test/unix_domain.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,9 @@ haproxy_server_up{backend="test",server="127.0.0.1:8080"} 1
# HELP haproxy_server_weight Current weight of the server.
# TYPE haproxy_server_weight gauge
haproxy_server_weight{backend="test",server="127.0.0.1:8080"} 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 1
# HELP haproxy_version_info HAProxy version info.
# TYPE haproxy_version_info gauge
haproxy_version_info{release_date="test date",version="test version"} 1
2 changes: 1 addition & 1 deletion test/unix_domain_deadline.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0
# HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes.
# TYPE haproxy_exporter_total_scrapes counter
haproxy_exporter_total_scrapes 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 0
2 changes: 1 addition & 1 deletion test/unix_domain_not_found.metrics
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ haproxy_exporter_csv_parse_failures 0
# HELP haproxy_exporter_total_scrapes Current total HAProxy scrapes.
# TYPE haproxy_exporter_total_scrapes counter
haproxy_exporter_total_scrapes 1
# HELP haproxy_up Was the last scrape of haproxy successful.
# HELP haproxy_up Was the last scrape of HAProxy successful.
# TYPE haproxy_up gauge
haproxy_up 0