Implement --log.prober
Signed-off-by: Jan-Otto Kröpke <[email protected]>
jkroepke committed Jul 10, 2023
1 parent 6dc1237 commit 7643270
Showing 2 changed files with 15 additions and 28 deletions.
main.go: 7 changes (5 additions, 2 deletions)
@@ -50,7 +50,7 @@ var (
configFile = kingpin.Flag("config.file", "Blackbox exporter configuration file.").Default("blackbox.yml").String()
timeoutOffset = kingpin.Flag("timeout-offset", "Offset to subtract from timeout in seconds.").Default("0.5").Float64()
configCheck = kingpin.Flag("config.check", "If true validate the config file and then exit.").Default().Bool()
- logProbeErrors = kingpin.Flag("log.probe-errors", "Log error from probe requests").Default().Bool()
+ logLevelProber = kingpin.Flag("log.prober", "Log level from probe requests. One of: [debug, info, warn, error, none]").Default("none").String()
historyLimit = kingpin.Flag("history.limit", "The maximum amount of items to keep in the history.").Default("100").Uint()
externalURL = kingpin.Flag("web.external-url", "The URL under which Blackbox exporter is externally reachable (for example, if Blackbox exporter is served via a reverse proxy). Used for generating relative and absolute links back to Blackbox exporter itself. If the URL has a path portion, it will be used to prefix all HTTP endpoints served by Blackbox exporter. If omitted, relevant URL components will be derived automatically.").PlaceHolder("<url>").String()
routePrefix = kingpin.Flag("web.route-prefix", "Prefix for the internal routes of web endpoints. Defaults to path of --web.external-url.").PlaceHolder("<path>").String()
@@ -80,6 +80,9 @@ func run() int {
logger := promlog.New(promlogConfig)
rh := &prober.ResultHistory{MaxResults: *historyLimit}

+ logLevelProberValue, _ := level.Parse(*logLevelProber)
+ logLevelProber := level.Allow(logLevelProberValue)
+
level.Info(logger).Log("msg", "Starting blackbox_exporter", "version", version.Info())
level.Info(logger).Log("build_context", version.BuildContext())

@@ -183,7 +186,7 @@ func run() int {
sc.Lock()
conf := sc.C
sc.Unlock()
- prober.Handler(w, r, conf, logger, rh, *timeoutOffset, nil, moduleUnknownCounter, *logProbeErrors)
+ prober.Handler(w, r, conf, logger, rh, *timeoutOffset, nil, moduleUnknownCounter, logLevelProber)
})
http.HandleFunc(*routePrefix, func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/html")
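
The two lines added to run() convert the --log.prober string into a go-kit/log level.Option once at startup: level.Parse maps the flag value to a level, and level.Allow turns that level into a filter option. The short variable declaration shadows the package-level flag variable inside run(), so it is the level.Option, not the raw *string, that reaches prober.Handler in the last hunk. Below is a minimal sketch of what those two calls do, assuming go-kit/log v0.2+ semantics in which level.Allow falls back to AllowNone() for a nil or unrecognized Value; that assumed fallback is why the flag can default to "none" and why the Parse error can be discarded. The helper name proberAllowOption is illustrative, not part of the exporter.

package main

import (
    "os"

    "github.com/go-kit/log"
    "github.com/go-kit/log/level"
)

// proberAllowOption mirrors the two added lines in run(): parse the flag
// value, then build the level.Option that later gates probe log records.
func proberAllowOption(name string) level.Option {
    v, _ := level.Parse(name) // "none" (or any unknown name) yields a nil Value and an error
    return level.Allow(v)     // assumed fallback: nil/unknown Value -> level.AllowNone()
}

func main() {
    logger := log.NewLogfmtLogger(os.Stdout)

    // --log.prober=warn: only warn and error records pass the filter.
    warnOnly := level.NewFilter(logger, proberAllowOption("warn"))
    level.Info(warnOnly).Log("msg", "dropped")
    level.Error(warnOnly).Log("msg", "printed")

    // Default --log.prober=none: every leveled probe record is suppressed.
    silent := level.NewFilter(logger, proberAllowOption("none"))
    level.Error(silent).Log("msg", "never printed")
}

Running the exporter with, for example, --log.prober=error therefore forwards only error-level probe records to its own log, while the default keeps probe logging off entirely.
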
prober/handler.go: 36 changes (10 additions, 26 deletions)
@@ -42,9 +42,9 @@ var (
}
)

- func Handler(w http.ResponseWriter, r *http.Request, c *config.Config, logger log.Logger,
- rh *ResultHistory, timeoutOffset float64, params url.Values, moduleUnknownCounter prometheus.Counter,
- logProbeErrors bool) {
+ func Handler(w http.ResponseWriter, r *http.Request, c *config.Config, logger log.Logger, rh *ResultHistory, timeoutOffset float64, params url.Values,
+ moduleUnknownCounter prometheus.Counter,
+ logLevelProber level.Option) {

if params == nil {
params = r.URL.Query()
@@ -109,7 +109,7 @@ func Handler(w http.ResponseWriter, r *http.Request, c *config.Config, logger lo
}
}

- sl := newScrapeLogger(logger, moduleName, target, logProbeErrors)
+ sl := newScrapeLogger(logger, moduleName, target, logLevelProber)
level.Info(sl).Log("msg", "Beginning probe", "probe", module.Prober, "timeout_seconds", timeoutSeconds)

start := time.Now()
Expand Down Expand Up @@ -160,15 +160,15 @@ type scrapeLogger struct {
next log.Logger
buffer bytes.Buffer
bufferLogger log.Logger
- logErrors bool
+ logLevel level.Option
}

- func newScrapeLogger(logger log.Logger, module string, target string, logProbeErrors bool) *scrapeLogger {
+ func newScrapeLogger(logger log.Logger, module string, target string, logLevel level.Option) *scrapeLogger {
logger = log.With(logger, "module", module, "target", target)
sl := &scrapeLogger{
- next: logger,
- buffer: bytes.Buffer{},
- logErrors: logProbeErrors,
+ next: logger,
+ buffer: bytes.Buffer{},
+ logLevel: logLevel,
}
bl := log.NewLogfmtLogger(&sl.buffer)
sl.bufferLogger = log.With(bl, "ts", log.DefaultTimestampUTC, "caller", log.Caller(6), "module", module, "target", target)
@@ -177,24 +177,8 @@ func newScrapeLogger(logger log.Logger, module string, target string, logProbeEr

func (sl scrapeLogger) Log(keyvals ...interface{}) error {
sl.bufferLogger.Log(keyvals...)
- kvs := make([]interface{}, len(keyvals))
- copy(kvs, keyvals)
-
- if sl.logErrors {
-     for i := 0; i < len(kvs); i += 2 {
-         if kvs[i] == level.Key() && kvs[i+1] == level.ErrorValue() {
-             return sl.next.Log(kvs...)
-         }
-     }
- }
-
- // Switch level to debug for application output.
- for i := 0; i < len(kvs); i += 2 {
-     if kvs[i] == level.Key() {
-         kvs[i+1] = level.DebugValue()
-     }
- }
- return sl.next.Log(kvs...)
+ return level.NewFilter(sl.next, sl.logLevel).Log(keyvals...)
}

// DebugOutput returns plaintext debug output for a probe.
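
The behavioural change sits in scrapeLogger.Log: the old code forwarded error records at their original level only when --log.probe-errors was set and demoted everything else to debug on the exporter's logger, whereas the new code forwards every record at its original level through a filter built from the --log.prober option; the in-memory buffer that feeds DebugOutput keeps receiving everything as before. A minimal standalone sketch of that tee-and-filter pattern follows, again assuming go-kit/log v0.2+; teeLogger and its field names are illustrative, not the exporter's scrapeLogger.

package main

import (
    "bytes"
    "fmt"
    "os"

    "github.com/go-kit/log"
    "github.com/go-kit/log/level"
)

// teeLogger keeps a full copy of the probe log in a buffer and lets only the
// allowed levels through to the main logger, mirroring the new Log method.
type teeLogger struct {
    next     log.Logger   // exporter's main logger
    buffer   bytes.Buffer // full probe log, always populated
    buffered log.Logger
    allow    level.Option // e.g. level.AllowError() for --log.prober=error
}

func newTeeLogger(next log.Logger, allow level.Option) *teeLogger {
    t := &teeLogger{next: next, allow: allow}
    t.buffered = log.NewLogfmtLogger(&t.buffer)
    return t
}

func (t *teeLogger) Log(keyvals ...interface{}) error {
    t.buffered.Log(keyvals...) // unconditional: feeds the per-probe debug output
    return level.NewFilter(t.next, t.allow).Log(keyvals...)
}

func main() {
    tl := newTeeLogger(log.NewLogfmtLogger(os.Stdout), level.AllowError())
    level.Debug(tl).Log("msg", "resolved target") // buffered only
    level.Error(tl).Log("msg", "probe failed")    // buffered and forwarded
    fmt.Print("--- buffered probe log ---\n", tl.buffer.String())
}

Run as-is, this prints only the error record to stdout while the buffer retains both lines, which is what an operator would see with --log.prober=error.
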
