From 113340b8edcbd52a8277815efccf0ca9c1931a4b Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Tue, 10 Sep 2024 23:11:01 +0300 Subject: [PATCH 01/10] Feature: Add redis search module metrics fixes: #942 --- Makefile | 3 +- README.md | 1 + docker-compose.yml | 5 ++ exporter/exporter.go | 155 ++++++++++++++++++++++------------ exporter/http_test.go | 1 + exporter/search.go | 41 +++++++++++ exporter/search_test.go | 69 ++++++++++++++++++ main.go | 2 + 8 files changed, 206 insertions(+), 71 deletions(-) create mode 100644 exporter/search.go create mode 100644 exporter/search_test.go diff --git a/Makefile b/Makefile index cb3ee4d6..daad096e 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,8 @@ test: TEST_REDIS_CLUSTER_PASSWORD_URI="redis://localhost:17006" \ TEST_TILE38_URI="redis://localhost:19851" \ TEST_REDIS_SENTINEL_URI="redis://localhost:26379" \ - go test -v -covermode=atomic -cover -race -coverprofile=coverage.txt -p 1 ./... + TEST_REDIS_SEARCH_URI="redis://redis-search:6379" \ + go test -v -covermode=atomic -cover -race -coverprofile=coverage.txt -p 1 ./... .PHONY: lint lint: diff --git a/README.md b/README.md index dd1948e1..70c3d9aa 100644 --- a/README.md +++ b/README.md @@ -166,6 +166,7 @@ Prometheus uses file watches and all changes to the json file are applied immedi | ping-on-connect | REDIS_EXPORTER_PING_ON_CONNECT | Whether to ping the redis instance after connecting and record the duration as a metric, defaults to false. | | is-tile38 | REDIS_EXPORTER_IS_TILE38 | Whether to scrape Tile38 specific metrics, defaults to false. | | is-cluster | REDIS_EXPORTER_IS_CLUSTER | Whether this is a redis cluster (Enable this if you need to fetch key level data on a Redis Cluster). | +| is-search | REDIS_EXPORTER_IS_SEARCH | Whether this Redis instance has the RediSearch module enabled and metrics should be collected. | | export-client-list | REDIS_EXPORTER_EXPORT_CLIENT_LIST | Whether to scrape Client List specific metrics, defaults to false. | | export-client-port | REDIS_EXPORTER_EXPORT_CLIENT_PORT | Whether to include the client's port when exporting the client list.
Warning: including the port increases the number of metrics generated and will make your Prometheus server take up more memory | | skip-tls-verification | REDIS_EXPORTER_SKIP_TLS_VERIFICATION | Whether to to skip TLS verification when the exporter connects to a Redis instance | diff --git a/docker-compose.yml b/docker-compose.yml index 9e4202c9..7c0321de 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -83,3 +83,8 @@ services: image: tile38/tile38:latest ports: - "19851:9851" + + redis-search: + image: redis/redis-stack-server:7.4.0-v0 + ports: + - "7379:6379" diff --git a/exporter/exporter.go b/exporter/exporter.go index c740ad8c..791836b7 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -65,6 +65,7 @@ type Options struct { ClientKeyFile string CaCertFile string InclConfigMetrics bool + IsSearch bool DisableExportingKeyValues bool ExcludeLatencyHistogramMetrics bool RedactConfigMetrics bool @@ -267,6 +268,16 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { "server_threads": "server_threads_total", "long_lock_waits": "long_lock_waits_total", "current_client_thread": "current_client_thread", + + // RediSearch module metrics + "search_number_of_indexes": "search_number_of_indexes", + "search_used_memory_indexes": "search_used_memory_indexes", + "search_total_indexing_time": "search_total_indexing_time", + "search_global_idle": "search_global_idle", + "search_global_total": "search_global_total", + "search_bytes_collected": "search_bytes_collected", + "search_total_cycles": "search_total_cycles", + "search_total_ms_run": "search_total_ms_run", }, metricMapCounters: map[string]string{ @@ -350,77 +361,77 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { txt string lbls []string }{ - "commands_duration_seconds_total": {txt: `Total amount of time in seconds spent per command`, lbls: []string{"cmd"}}, - "commands_failed_calls_total": {txt: `Total number of errors prior command execution per command`, lbls: []string{"cmd"}}, - "commands_rejected_calls_total": {txt: `Total number of errors within command execution per command`, lbls: []string{"cmd"}}, - "commands_total": {txt: `Total number of calls per command`, lbls: []string{"cmd"}}, - "commands_latencies_usec": {txt: `A histogram of latencies per command`, lbls: []string{"cmd"}}, - "latency_percentiles_usec": {txt: `A summary of latency percentile distribution per command`, lbls: []string{"cmd"}}, - "config_client_output_buffer_limit_bytes": {txt: `The configured buffer limits per class`, lbls: []string{"class", "limit"}}, + "commands_duration_seconds_total": {txt: `Total amount of time in seconds spent per command`, lbls: []string{"cmd"}}, + "commands_failed_calls_total": {txt: `Total number of errors prior command execution per command`, lbls: []string{"cmd"}}, + "commands_rejected_calls_total": {txt: `Total number of errors within command execution per command`, lbls: []string{"cmd"}}, + "commands_total": {txt: `Total number of calls per command`, lbls: []string{"cmd"}}, + "commands_latencies_usec": {txt: `A histogram of latencies per command`, lbls: []string{"cmd"}}, + "latency_percentiles_usec": {txt: `A summary of latency percentile distribution per command`, lbls: []string{"cmd"}}, + "config_client_output_buffer_limit_bytes": {txt: `The configured buffer limits per class`, lbls: []string{"class", "limit"}}, "config_client_output_buffer_limit_overcome_seconds": {txt: `How long for buffer limits per class to be exceeded before replicas are dropped`, lbls: 
[]string{"class", "limit"}}, - "config_key_value": {txt: `Config key and value`, lbls: []string{"key", "value"}}, - "config_value": {txt: `Config key and value as metric`, lbls: []string{"key"}}, - "connected_slave_lag_seconds": {txt: "Lag of connected slave", lbls: []string{"slave_ip", "slave_port", "slave_state"}}, - "connected_slave_offset_bytes": {txt: "Offset of connected slave", lbls: []string{"slave_ip", "slave_port", "slave_state"}}, - "db_avg_ttl_seconds": {txt: "Avg TTL in seconds", lbls: []string{"db"}}, - "db_keys": {txt: "Total number of keys by DB", lbls: []string{"db"}}, - "db_keys_expiring": {txt: "Total number of expiring keys by DB", lbls: []string{"db"}}, - "db_keys_cached": {txt: "Total number of cached keys by DB", lbls: []string{"db"}}, - "errors_total": {txt: `Total number of errors per error type`, lbls: []string{"err"}}, - "exporter_last_scrape_error": {txt: "The last scrape error status.", lbls: []string{"err"}}, - "instance_info": {txt: "Information about the Redis instance", lbls: []string{"role", "redis_version", "redis_build_id", "redis_mode", "os", "maxmemory_policy", "tcp_port", "run_id", "process_id", "master_replid"}}, - "key_group_count": {txt: `Count of keys in key group`, lbls: []string{"db", "key_group"}}, - "key_group_memory_usage_bytes": {txt: `Total memory usage of key group in bytes`, lbls: []string{"db", "key_group"}}, - "key_size": {txt: `The length or size of "key"`, lbls: []string{"db", "key"}}, - "key_value": {txt: `The value of "key"`, lbls: []string{"db", "key"}}, - "key_value_as_string": {txt: `The value of "key" as a string`, lbls: []string{"db", "key", "val"}}, - "keys_count": {txt: `Count of keys`, lbls: []string{"db", "key"}}, - "last_key_groups_scrape_duration_milliseconds": {txt: `Duration of the last key group metrics scrape in milliseconds`}, - "last_slow_execution_duration_seconds": {txt: `The amount of time needed for last slow execution, in seconds`}, - "latency_spike_duration_seconds": {txt: `Length of the last latency spike in seconds`, lbls: []string{"event_name"}}, - "latency_spike_last": {txt: `When the latency spike last occurred`, lbls: []string{"event_name"}}, - "master_last_io_seconds_ago": {txt: "Master last io seconds ago", lbls: []string{"master_host", "master_port"}}, - "master_link_up": {txt: "Master link status on Redis slave", lbls: []string{"master_host", "master_port"}}, - "master_sync_in_progress": {txt: "Master sync in progress", lbls: []string{"master_host", "master_port"}}, - "number_of_distinct_key_groups": {txt: `Number of distinct key groups`, lbls: []string{"db"}}, - "script_result": {txt: "Result of the collect script evaluation", lbls: []string{"filename"}}, - "script_values": {txt: "Values returned by the collect script", lbls: []string{"key", "filename"}}, - "sentinel_master_ok_sentinels": {txt: "The number of okay sentinels monitoring this master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_ok_slaves": {txt: "The number of okay slaves of the master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_sentinels": {txt: "The number of sentinels monitoring this master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_slaves": {txt: "The number of slaves of the master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_status": {txt: "Master status on Sentinel", lbls: []string{"master_name", "master_address", "master_status"}}, - "sentinel_master_ckquorum_status": {txt: "Master ckquorum status", lbls: 
[]string{"master_name", "message"}}, - "sentinel_masters": {txt: "The number of masters this sentinel is watching"}, - "sentinel_master_setting_ckquorum": {txt: "Show the current ckquorum config for each master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_setting_failover_timeout": {txt: "Show the current failover-timeout config for each master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_setting_parallel_syncs": {txt: "Show the current parallel-syncs config for each master", lbls: []string{"master_name", "master_address"}}, - "sentinel_master_setting_down_after_milliseconds": {txt: "Show the current down-after-milliseconds config for each master", lbls: []string{"master_name", "master_address"}}, - "sentinel_running_scripts": {txt: "Number of scripts in execution right now"}, - "sentinel_scripts_queue_length": {txt: "Queue of user scripts to execute"}, - "sentinel_simulate_failure_flags": {txt: "Failures simulations"}, - "sentinel_tilt": {txt: "Sentinel is in TILT mode"}, - "slave_info": {txt: "Information about the Redis slave", lbls: []string{"master_host", "master_port", "read_only"}}, - "slave_repl_offset": {txt: "Slave replication offset", lbls: []string{"master_host", "master_port"}}, - "slowlog_last_id": {txt: `Last id of slowlog`}, - "slowlog_length": {txt: `Total slowlog`}, - "start_time_seconds": {txt: "Start time of the Redis instance since unix epoch in seconds."}, - "stream_group_consumer_idle_seconds": {txt: `Consumer idle time in seconds`, lbls: []string{"db", "stream", "group", "consumer"}}, - "stream_group_consumer_messages_pending": {txt: `Pending number of messages for this specific consumer`, lbls: []string{"db", "stream", "group", "consumer"}}, - "stream_group_consumers": {txt: `Consumers count of stream group`, lbls: []string{"db", "stream", "group"}}, - "stream_group_entries_read": {txt: `Total number of entries read from the stream group`, lbls: []string{"db", "stream", "group"}}, - "stream_group_lag": {txt: `The number of messages waiting to be delivered to the stream group's consumers`, lbls: []string{"db", "stream", "group"}}, - "stream_group_last_delivered_id": {txt: `The epoch timestamp (ms) of the last delivered message`, lbls: []string{"db", "stream", "group"}}, - "stream_group_messages_pending": {txt: `Pending number of messages in that stream group`, lbls: []string{"db", "stream", "group"}}, - "stream_groups": {txt: `Groups count of stream`, lbls: []string{"db", "stream"}}, - "stream_last_generated_id": {txt: `The epoch timestamp (ms) of the latest message on the stream`, lbls: []string{"db", "stream"}}, - "stream_length": {txt: `The number of elements of the stream`, lbls: []string{"db", "stream"}}, - "stream_max_deleted_entry_id": {txt: `The epoch timestamp (ms) of last message was deleted from the stream`, lbls: []string{"db", "stream"}}, - "stream_first_entry_id": {txt: `The epoch timestamp (ms) of the first message in the stream`, lbls: []string{"db", "stream"}}, - "stream_last_entry_id": {txt: `The epoch timestamp (ms) of the last message in the stream`, lbls: []string{"db", "stream"}}, - "stream_radix_tree_keys": {txt: `Radix tree keys count"`, lbls: []string{"db", "stream"}}, - "stream_radix_tree_nodes": {txt: `Radix tree nodes count`, lbls: []string{"db", "stream"}}, - "up": {txt: "Information about the Redis instance"}, + "config_key_value": {txt: `Config key and value`, lbls: []string{"key", "value"}}, + "config_value": {txt: `Config key and value as metric`, lbls: []string{"key"}}, + 
"connected_slave_lag_seconds": {txt: "Lag of connected slave", lbls: []string{"slave_ip", "slave_port", "slave_state"}}, + "connected_slave_offset_bytes": {txt: "Offset of connected slave", lbls: []string{"slave_ip", "slave_port", "slave_state"}}, + "db_avg_ttl_seconds": {txt: "Avg TTL in seconds", lbls: []string{"db"}}, + "db_keys": {txt: "Total number of keys by DB", lbls: []string{"db"}}, + "db_keys_expiring": {txt: "Total number of expiring keys by DB", lbls: []string{"db"}}, + "db_keys_cached": {txt: "Total number of cached keys by DB", lbls: []string{"db"}}, + "errors_total": {txt: `Total number of errors per error type`, lbls: []string{"err"}}, + "exporter_last_scrape_error": {txt: "The last scrape error status.", lbls: []string{"err"}}, + "instance_info": {txt: "Information about the Redis instance", lbls: []string{"role", "redis_version", "redis_build_id", "redis_mode", "os", "maxmemory_policy", "tcp_port", "run_id", "process_id", "master_replid"}}, + "key_group_count": {txt: `Count of keys in key group`, lbls: []string{"db", "key_group"}}, + "key_group_memory_usage_bytes": {txt: `Total memory usage of key group in bytes`, lbls: []string{"db", "key_group"}}, + "key_size": {txt: `The length or size of "key"`, lbls: []string{"db", "key"}}, + "key_value": {txt: `The value of "key"`, lbls: []string{"db", "key"}}, + "key_value_as_string": {txt: `The value of "key" as a string`, lbls: []string{"db", "key", "val"}}, + "keys_count": {txt: `Count of keys`, lbls: []string{"db", "key"}}, + "last_key_groups_scrape_duration_milliseconds": {txt: `Duration of the last key group metrics scrape in milliseconds`}, + "last_slow_execution_duration_seconds": {txt: `The amount of time needed for last slow execution, in seconds`}, + "latency_spike_duration_seconds": {txt: `Length of the last latency spike in seconds`, lbls: []string{"event_name"}}, + "latency_spike_last": {txt: `When the latency spike last occurred`, lbls: []string{"event_name"}}, + "master_last_io_seconds_ago": {txt: "Master last io seconds ago", lbls: []string{"master_host", "master_port"}}, + "master_link_up": {txt: "Master link status on Redis slave", lbls: []string{"master_host", "master_port"}}, + "master_sync_in_progress": {txt: "Master sync in progress", lbls: []string{"master_host", "master_port"}}, + "number_of_distinct_key_groups": {txt: `Number of distinct key groups`, lbls: []string{"db"}}, + "script_result": {txt: "Result of the collect script evaluation", lbls: []string{"filename"}}, + "script_values": {txt: "Values returned by the collect script", lbls: []string{"key", "filename"}}, + "sentinel_master_ok_sentinels": {txt: "The number of okay sentinels monitoring this master", lbls: []string{"master_name", "master_address"}}, + "sentinel_master_ok_slaves": {txt: "The number of okay slaves of the master", lbls: []string{"master_name", "master_address"}}, + "sentinel_master_sentinels": {txt: "The number of sentinels monitoring this master", lbls: []string{"master_name", "master_address"}}, + "sentinel_master_slaves": {txt: "The number of slaves of the master", lbls: []string{"master_name", "master_address"}}, + "sentinel_master_status": {txt: "Master status on Sentinel", lbls: []string{"master_name", "master_address", "master_status"}}, + "sentinel_master_ckquorum_status": {txt: "Master ckquorum status", lbls: []string{"master_name", "message"}}, + "sentinel_masters": {txt: "The number of masters this sentinel is watching"}, + "sentinel_master_setting_ckquorum": {txt: "Show the current ckquorum config for each master", 
lbls: []string{"master_name", "master_address"}}, + "sentinel_master_setting_failover_timeout": {txt: "Show the current failover-timeout config for each master", lbls: []string{"master_name", "master_address"}}, + "sentinel_master_setting_parallel_syncs": {txt: "Show the current parallel-syncs config for each master", lbls: []string{"master_name", "master_address"}}, + "sentinel_master_setting_down_after_milliseconds": {txt: "Show the current down-after-milliseconds config for each master", lbls: []string{"master_name", "master_address"}}, + "sentinel_running_scripts": {txt: "Number of scripts in execution right now"}, + "sentinel_scripts_queue_length": {txt: "Queue of user scripts to execute"}, + "sentinel_simulate_failure_flags": {txt: "Failures simulations"}, + "sentinel_tilt": {txt: "Sentinel is in TILT mode"}, + "slave_info": {txt: "Information about the Redis slave", lbls: []string{"master_host", "master_port", "read_only"}}, + "slave_repl_offset": {txt: "Slave replication offset", lbls: []string{"master_host", "master_port"}}, + "slowlog_last_id": {txt: `Last id of slowlog`}, + "slowlog_length": {txt: `Total slowlog`}, + "start_time_seconds": {txt: "Start time of the Redis instance since unix epoch in seconds."}, + "stream_group_consumer_idle_seconds": {txt: `Consumer idle time in seconds`, lbls: []string{"db", "stream", "group", "consumer"}}, + "stream_group_consumer_messages_pending": {txt: `Pending number of messages for this specific consumer`, lbls: []string{"db", "stream", "group", "consumer"}}, + "stream_group_consumers": {txt: `Consumers count of stream group`, lbls: []string{"db", "stream", "group"}}, + "stream_group_entries_read": {txt: `Total number of entries read from the stream group`, lbls: []string{"db", "stream", "group"}}, + "stream_group_lag": {txt: `The number of messages waiting to be delivered to the stream group's consumers`, lbls: []string{"db", "stream", "group"}}, + "stream_group_last_delivered_id": {txt: `The epoch timestamp (ms) of the last delivered message`, lbls: []string{"db", "stream", "group"}}, + "stream_group_messages_pending": {txt: `Pending number of messages in that stream group`, lbls: []string{"db", "stream", "group"}}, + "stream_groups": {txt: `Groups count of stream`, lbls: []string{"db", "stream"}}, + "stream_last_generated_id": {txt: `The epoch timestamp (ms) of the latest message on the stream`, lbls: []string{"db", "stream"}}, + "stream_length": {txt: `The number of elements of the stream`, lbls: []string{"db", "stream"}}, + "stream_max_deleted_entry_id": {txt: `The epoch timestamp (ms) of last message was deleted from the stream`, lbls: []string{"db", "stream"}}, + "stream_first_entry_id": {txt: `The epoch timestamp (ms) of the first message in the stream`, lbls: []string{"db", "stream"}}, + "stream_last_entry_id": {txt: `The epoch timestamp (ms) of the last message in the stream`, lbls: []string{"db", "stream"}}, + "stream_radix_tree_keys": {txt: `Radix tree keys count"`, lbls: []string{"db", "stream"}}, + "stream_radix_tree_nodes": {txt: `Radix tree nodes count`, lbls: []string{"db", "stream"}}, + "up": {txt: "Information about the Redis instance"}, } { e.metricDescriptions[k] = newMetricDescr(opts.Namespace, k, desc.txt, desc.lbls) } @@ -698,6 +709,10 @@ func (e *Exporter) scrapeRedisHost(ch chan<- prometheus.Metric) error { e.extractTile38Metrics(ch, c) } + if e.options.IsSearch { + e.extractSearchMetrics(ch, c) + } + if len(e.options.LuaScript) > 0 { for filename, script := range e.options.LuaScript { if err := 
e.extractLuaScriptMetrics(ch, c, filename, script); err != nil { diff --git a/exporter/http_test.go b/exporter/http_test.go index db672a93..ef8565fc 100644 --- a/exporter/http_test.go +++ b/exporter/http_test.go @@ -210,6 +210,7 @@ func TestSimultaneousMetricsHttpRequests(t *testing.T) { os.Getenv("TEST_REDIS5_URI"), os.Getenv("TEST_REDIS6_URI"), + os.Getenv("TEST_REDIS_SEARCH_URI"), // tile38 & Cluster need to be last in this list so we can identify them when selected, down in line 229 os.Getenv("TEST_REDIS_CLUSTER_MASTER_URI"), diff --git a/exporter/search.go b/exporter/search.go new file mode 100644 index 00000000..dcb69d87 --- /dev/null +++ b/exporter/search.go @@ -0,0 +1,41 @@ +package exporter + +import ( + "strings" + + "github.com/gomodule/redigo/redis" + "github.com/prometheus/client_golang/prometheus" + log "github.com/sirupsen/logrus" +) + +func (e *Exporter) extractSearchMetrics(ch chan<- prometheus.Metric, c redis.Conn) { + for _, section := range [5]string{"search_version", "search_index", "search_memory", "search_cursors", "search_gc"} { + info, err := redis.String(doRedisCmd(c, "INFO", section)) + if err != nil { + log.Errorf("extractSearchMetrics() err: %s", err) + return + } + e.registerSearchMetrics(ch, info) + } + +} + +func (e *Exporter) registerSearchMetrics(ch chan<- prometheus.Metric, info string) { + lines := strings.Split(info, "\r\n") + + for _, line := range lines { + log.Debugf("info: %s", line) + + split := strings.Split(line, ":") + if len(split) != 2 { + continue + } + fieldKey := split[0] + fieldValue := split[1] + + if !e.includeMetric(fieldKey) { + continue + } + e.parseAndRegisterConstMetric(ch, fieldKey, fieldValue) + } +} diff --git a/exporter/search_test.go b/exporter/search_test.go new file mode 100644 index 00000000..59a5361f --- /dev/null +++ b/exporter/search_test.go @@ -0,0 +1,69 @@ +package exporter + +import ( + "os" + "strings" + "testing" + + "github.com/prometheus/client_golang/prometheus" +) + +func TestSearch(t *testing.T) { + if os.Getenv("TEST_REDIS_SEARCH_URI") == "" { + t.Skipf("TEST_REDIS_SEARCH_URI not set - skipping") + } + + tsts := []struct { + addr string + isSearch bool + wantSearchMetrics bool + }{ + {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: true, wantSearchMetrics: true}, + {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: false, wantSearchMetrics: false}, + {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: true, wantSearchMetrics: false}, + {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: false, wantSearchMetrics: false}, + } + + for _, tst := range tsts { + e, _ := NewRedisExporter(tst.addr, Options{Namespace: "test", IsTile38: tst.isSearch}) + + chM := make(chan prometheus.Metric) + go func() { + e.Collect(chM) + close(chM) + }() + + wantedMetrics := map[string]bool{ + "search_number_of_indexes": false, + "search_used_memory_indexes": false, + "search_total_indexing_time": false, + "search_global_idle": false, + "search_global_total": false, + "search_bytes_collected": false, + "search_total_cycles": false, + "search_total_ms_run": false, + } + + for m := range chM { + for want := range wantedMetrics { + if strings.Contains(m.Desc().String(), want) { + wantedMetrics[want] = true + } + } + } + + if tst.wantSearchMetrics { + for want, found := range wantedMetrics { + if !found { + t.Errorf("%s was *not* found in RediSearch metrics but expected", want) + } + } + } else if !tst.wantSearchMetrics { + for want, found := range wantedMetrics { + if found { + t.Errorf("%s was 
*found* in RediSearch metrics but *not* expected", want) + } + } + } + } +} diff --git a/main.go b/main.go index 90215d56..fa996277 100644 --- a/main.go +++ b/main.go @@ -88,6 +88,7 @@ func main() { setClientName = flag.Bool("set-client-name", getEnvBool("REDIS_EXPORTER_SET_CLIENT_NAME", true), "Whether to set client name to redis_exporter") isTile38 = flag.Bool("is-tile38", getEnvBool("REDIS_EXPORTER_IS_TILE38", false), "Whether to scrape Tile38 specific metrics") isCluster = flag.Bool("is-cluster", getEnvBool("REDIS_EXPORTER_IS_CLUSTER", false), "Whether this is a redis cluster (Enable this if you need to fetch key level data on a Redis Cluster).") + isSearch = flag.Bool("is-search", getEnvBool("REDIS_EXPORTER_IS_SEARCH", false), "Whether to scrape RediSearch module specific metrics") exportClientList = flag.Bool("export-client-list", getEnvBool("REDIS_EXPORTER_EXPORT_CLIENT_LIST", false), "Whether to scrape Client List specific metrics") exportClientPort = flag.Bool("export-client-port", getEnvBool("REDIS_EXPORTER_EXPORT_CLIENT_PORT", false), "Whether to include the client's port when exporting the client list. Warning: including the port increases the number of metrics generated and will make your Prometheus server take up more memory") showVersion = flag.Bool("version", false, "Show version information and exit") @@ -182,6 +183,7 @@ func main() { SetClientName: *setClientName, IsTile38: *isTile38, IsCluster: *isCluster, + IsSearch: *isSearch, ExportClientList: *exportClientList, ExportClientsInclPort: *exportClientPort, SkipTLSVerification: *skipTLSVerification, From 31ff648e77ce3fa209c50b5406b4dc7f41c55531 Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Wed, 11 Sep 2024 12:05:43 +0300 Subject: [PATCH 02/10] Fix redis-search port for tests --- Makefile | 2 +- docker-compose.yml | 2 +- exporter/search_test.go | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index daad096e..74c9322a 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,7 @@ test: TEST_REDIS_CLUSTER_PASSWORD_URI="redis://localhost:17006" \ TEST_TILE38_URI="redis://localhost:19851" \ TEST_REDIS_SENTINEL_URI="redis://localhost:26379" \ - TEST_REDIS_SEARCH_URI="redis://redis-search:6379" \ + TEST_REDIS_SEARCH_URI="redis://localhost:36379" \ go test -v -covermode=atomic -cover -race -coverprofile=coverage.txt -p 1 ./... 
.PHONY: lint diff --git a/docker-compose.yml b/docker-compose.yml index 7c0321de..a1e803bb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -87,4 +87,4 @@ services: redis-search: image: redis/redis-stack-server:7.4.0-v0 ports: - - "7379:6379" + - "36379:6379" diff --git a/exporter/search_test.go b/exporter/search_test.go index 59a5361f..69bc0bde 100644 --- a/exporter/search_test.go +++ b/exporter/search_test.go @@ -20,12 +20,12 @@ func TestSearch(t *testing.T) { }{ {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: true, wantSearchMetrics: true}, {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: false, wantSearchMetrics: false}, - {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: true, wantSearchMetrics: false}, - {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: false, wantSearchMetrics: false}, + {addr: os.Getenv("TEST_REDIS_URI"), isSearch: true, wantSearchMetrics: false}, + {addr: os.Getenv("TEST_REDIS_URI"), isSearch: false, wantSearchMetrics: false}, } for _, tst := range tsts { - e, _ := NewRedisExporter(tst.addr, Options{Namespace: "test", IsTile38: tst.isSearch}) + e, _ := NewRedisExporter(tst.addr, Options{Namespace: "test", IsSearch: tst.isSearch}) chM := make(chan prometheus.Metric) go func() { From 4dffe834df6f14b767dd3254b23efe5b68172e1d Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Wed, 11 Sep 2024 14:24:07 +0300 Subject: [PATCH 03/10] Switch to universal data collection for modules --- Makefile | 2 +- README.md | 2 +- docker-compose.yml | 2 +- exporter/exporter.go | 9 +++--- exporter/http_test.go | 2 +- exporter/{search.go => modules.go} | 17 ++++------- exporter/{search_test.go => modules_test.go} | 30 ++++++++++---------- main.go | 4 +-- 8 files changed, 31 insertions(+), 37 deletions(-) rename exporter/{search.go => modules.go} (53%) rename exporter/{search_test.go => modules_test.go} (53%) diff --git a/Makefile b/Makefile index 74c9322a..aa4f3184 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,7 @@ test: TEST_REDIS_CLUSTER_PASSWORD_URI="redis://localhost:17006" \ TEST_TILE38_URI="redis://localhost:19851" \ TEST_REDIS_SENTINEL_URI="redis://localhost:26379" \ - TEST_REDIS_SEARCH_URI="redis://localhost:36379" \ + TEST_REDIS_MODULES_URI="redis://localhost:36379" \ go test -v -covermode=atomic -cover -race -coverprofile=coverage.txt -p 1 ./... .PHONY: lint diff --git a/README.md b/README.md index 70c3d9aa..2cc1cdd2 100644 --- a/README.md +++ b/README.md @@ -161,12 +161,12 @@ Prometheus uses file watches and all changes to the json file are applied immedi | redis-only-metrics | REDIS_EXPORTER_REDIS_ONLY_METRICS | Whether to also export go runtime metrics, defaults to false. | | include-config-metrics | REDIS_EXPORTER_INCL_CONFIG_METRICS | Whether to include all config settings as metrics, defaults to false. | | include-system-metrics | REDIS_EXPORTER_INCL_SYSTEM_METRICS | Whether to include system metrics like `total_system_memory_bytes`, defaults to false. | +| include-modules-metrics | REDIS_EXPORTER_INCL_MODULES_METRICS | Whether to collect Redis Modules metrics, defaults to false. | | exclude-latency-histogram-metrics | REDIS_EXPORTER_EXCLUDE_LATENCY_HISTOGRAM_METRICS | Do not try to collect latency histogram metrics (to avoid `WARNING, LOGGED ONCE ONLY: cmd LATENCY HISTOGRAM` error on Redis < v7). | | redact-config-metrics | REDIS_EXPORTER_REDACT_CONFIG_METRICS | Whether to redact config settings that include potentially sensitive information like passwords. 
| | ping-on-connect | REDIS_EXPORTER_PING_ON_CONNECT | Whether to ping the redis instance after connecting and record the duration as a metric, defaults to false. | | is-tile38 | REDIS_EXPORTER_IS_TILE38 | Whether to scrape Tile38 specific metrics, defaults to false. | | is-cluster | REDIS_EXPORTER_IS_CLUSTER | Whether this is a redis cluster (Enable this if you need to fetch key level data on a Redis Cluster). | -| is-search | REDIS_EXPORTER_IS_SEARCH | Whether this is a redis has RediSearch module enabled and metrics should be collected. | | export-client-list | REDIS_EXPORTER_EXPORT_CLIENT_LIST | Whether to scrape Client List specific metrics, defaults to false. | | export-client-port | REDIS_EXPORTER_EXPORT_CLIENT_PORT | Whether to include the client's port when exporting the client list. Warning: including the port increases the number of metrics generated and will make your Prometheus server take up more memory | | skip-tls-verification | REDIS_EXPORTER_SKIP_TLS_VERIFICATION | Whether to to skip TLS verification when the exporter connects to a Redis instance | diff --git a/docker-compose.yml b/docker-compose.yml index a1e803bb..25da5938 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -84,7 +84,7 @@ services: ports: - "19851:9851" - redis-search: + redis-stack: image: redis/redis-stack-server:7.4.0-v0 ports: - "36379:6379" diff --git a/exporter/exporter.go b/exporter/exporter.go index 791836b7..f3648958 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -65,7 +65,7 @@ type Options struct { ClientKeyFile string CaCertFile string InclConfigMetrics bool - IsSearch bool + InclModulesMetrics bool DisableExportingKeyValues bool ExcludeLatencyHistogramMetrics bool RedactConfigMetrics bool @@ -269,7 +269,8 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { "long_lock_waits": "long_lock_waits_total", "current_client_thread": "current_client_thread", - // RediSearch module metrics + // Redis Modules metrics + // RediSearch module "search_number_of_indexes": "search_number_of_indexes", "search_used_memory_indexes": "search_used_memory_indexes", "search_total_indexing_time": "search_total_indexing_time", @@ -709,8 +710,8 @@ func (e *Exporter) scrapeRedisHost(ch chan<- prometheus.Metric) error { e.extractTile38Metrics(ch, c) } - if e.options.IsSearch { - e.extractSearchMetrics(ch, c) + if e.options.InclModulesMetrics { + e.extractModuleMetrics(ch, c) } if len(e.options.LuaScript) > 0 { diff --git a/exporter/http_test.go b/exporter/http_test.go index ef8565fc..203ab143 100644 --- a/exporter/http_test.go +++ b/exporter/http_test.go @@ -210,7 +210,7 @@ func TestSimultaneousMetricsHttpRequests(t *testing.T) { os.Getenv("TEST_REDIS5_URI"), os.Getenv("TEST_REDIS6_URI"), - os.Getenv("TEST_REDIS_SEARCH_URI"), + os.Getenv("TEST_REDIS_MODULES_URI"), // tile38 & Cluster need to be last in this list so we can identify them when selected, down in line 229 os.Getenv("TEST_REDIS_CLUSTER_MASTER_URI"), diff --git a/exporter/search.go b/exporter/modules.go similarity index 53% rename from exporter/search.go rename to exporter/modules.go index dcb69d87..965cd20a 100644 --- a/exporter/search.go +++ b/exporter/modules.go @@ -8,21 +8,14 @@ import ( log "github.com/sirupsen/logrus" ) -func (e *Exporter) extractSearchMetrics(ch chan<- prometheus.Metric, c redis.Conn) { - for _, section := range [5]string{"search_version", "search_index", "search_memory", "search_cursors", "search_gc"} { - info, err := redis.String(doRedisCmd(c, "INFO", section)) - if err != nil { 
- log.Errorf("extractSearchMetrics() err: %s", err) - return - } - e.registerSearchMetrics(ch, info) +func (e *Exporter) extractModuleMetrics(ch chan<- prometheus.Metric, c redis.Conn) { + info, err := redis.String(doRedisCmd(c, "INFO", "MODULES")) + if err != nil { + log.Errorf("extractSearchMetrics() err: %s", err) + return } -} - -func (e *Exporter) registerSearchMetrics(ch chan<- prometheus.Metric, info string) { lines := strings.Split(info, "\r\n") - for _, line := range lines { log.Debugf("info: %s", line) diff --git a/exporter/search_test.go b/exporter/modules_test.go similarity index 53% rename from exporter/search_test.go rename to exporter/modules_test.go index 69bc0bde..19f67111 100644 --- a/exporter/search_test.go +++ b/exporter/modules_test.go @@ -8,24 +8,24 @@ import ( "github.com/prometheus/client_golang/prometheus" ) -func TestSearch(t *testing.T) { - if os.Getenv("TEST_REDIS_SEARCH_URI") == "" { - t.Skipf("TEST_REDIS_SEARCH_URI not set - skipping") +func TestModules(t *testing.T) { + if os.Getenv("TEST_REDIS_MODULES_URI") == "" { + t.Skipf("TEST_REDIS_MODULES_URI not set - skipping") } tsts := []struct { - addr string - isSearch bool - wantSearchMetrics bool + addr string + inclModulesMetrics bool + wantModulesMetrics bool }{ - {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: true, wantSearchMetrics: true}, - {addr: os.Getenv("TEST_REDIS_SEARCH_URI"), isSearch: false, wantSearchMetrics: false}, - {addr: os.Getenv("TEST_REDIS_URI"), isSearch: true, wantSearchMetrics: false}, - {addr: os.Getenv("TEST_REDIS_URI"), isSearch: false, wantSearchMetrics: false}, + {addr: os.Getenv("TEST_REDIS_MODULES_URI"), inclModulesMetrics: true, wantModulesMetrics: true}, + {addr: os.Getenv("TEST_REDIS_MODULES_URI"), inclModulesMetrics: false, wantModulesMetrics: false}, + {addr: os.Getenv("TEST_REDIS_URI"), inclModulesMetrics: true, wantModulesMetrics: false}, + {addr: os.Getenv("TEST_REDIS_URI"), inclModulesMetrics: false, wantModulesMetrics: false}, } for _, tst := range tsts { - e, _ := NewRedisExporter(tst.addr, Options{Namespace: "test", IsSearch: tst.isSearch}) + e, _ := NewRedisExporter(tst.addr, Options{Namespace: "test", InclModulesMetrics: tst.inclModulesMetrics}) chM := make(chan prometheus.Metric) go func() { @@ -52,16 +52,16 @@ func TestSearch(t *testing.T) { } } - if tst.wantSearchMetrics { + if tst.wantModulesMetrics { for want, found := range wantedMetrics { if !found { - t.Errorf("%s was *not* found in RediSearch metrics but expected", want) + t.Errorf("%s was *not* found in Redis Modules metrics but expected", want) } } - } else if !tst.wantSearchMetrics { + } else if !tst.wantModulesMetrics { for want, found := range wantedMetrics { if found { - t.Errorf("%s was *found* in RediSearch metrics but *not* expected", want) + t.Errorf("%s was *found* in Redis Modules metrics but *not* expected", want) } } } diff --git a/main.go b/main.go index fa996277..fdb1bc07 100644 --- a/main.go +++ b/main.go @@ -88,13 +88,13 @@ func main() { setClientName = flag.Bool("set-client-name", getEnvBool("REDIS_EXPORTER_SET_CLIENT_NAME", true), "Whether to set client name to redis_exporter") isTile38 = flag.Bool("is-tile38", getEnvBool("REDIS_EXPORTER_IS_TILE38", false), "Whether to scrape Tile38 specific metrics") isCluster = flag.Bool("is-cluster", getEnvBool("REDIS_EXPORTER_IS_CLUSTER", false), "Whether this is a redis cluster (Enable this if you need to fetch key level data on a Redis Cluster).") - isSearch = flag.Bool("is-search", getEnvBool("REDIS_EXPORTER_IS_SEARCH", false), 
"Whether to scrape RediSearch module specific metrics") exportClientList = flag.Bool("export-client-list", getEnvBool("REDIS_EXPORTER_EXPORT_CLIENT_LIST", false), "Whether to scrape Client List specific metrics") exportClientPort = flag.Bool("export-client-port", getEnvBool("REDIS_EXPORTER_EXPORT_CLIENT_PORT", false), "Whether to include the client's port when exporting the client list. Warning: including the port increases the number of metrics generated and will make your Prometheus server take up more memory") showVersion = flag.Bool("version", false, "Show version information and exit") redisMetricsOnly = flag.Bool("redis-only-metrics", getEnvBool("REDIS_EXPORTER_REDIS_ONLY_METRICS", false), "Whether to also export go runtime metrics") pingOnConnect = flag.Bool("ping-on-connect", getEnvBool("REDIS_EXPORTER_PING_ON_CONNECT", false), "Whether to ping the redis instance after connecting") inclConfigMetrics = flag.Bool("include-config-metrics", getEnvBool("REDIS_EXPORTER_INCL_CONFIG_METRICS", false), "Whether to include all config settings as metrics") + inclModulesMetrics = flag.Bool("include-modules-metrics", getEnvBool("REDIS_EXPORTER_INCL_MODULES_METRICS", false), "Whether to collect Redis Modules metrics") disableExportingKeyValues = flag.Bool("disable-exporting-key-values", getEnvBool("REDIS_EXPORTER_DISABLE_EXPORTING_KEY_VALUES", false), "Whether to disable values of keys stored in redis as labels or not when using check-keys/check-single-key") excludeLatencyHistogramMetrics = flag.Bool("exclude-latency-histogram-metrics", getEnvBool("REDIS_EXPORTER_EXCLUDE_LATENCY_HISTOGRAM_METRICS", false), "Do not try to collect latency histogram metrics") redactConfigMetrics = flag.Bool("redact-config-metrics", getEnvBool("REDIS_EXPORTER_REDACT_CONFIG_METRICS", true), "Whether to redact config settings that include potentially sensitive information like passwords") @@ -183,7 +183,7 @@ func main() { SetClientName: *setClientName, IsTile38: *isTile38, IsCluster: *isCluster, - IsSearch: *isSearch, + InclModulesMetrics: *inclModulesMetrics, ExportClientList: *exportClientList, ExportClientsInclPort: *exportClientPort, SkipTLSVerification: *skipTLSVerification, From 9ff9169d6f7984e6fcdebc6ef1ba4da7aea1c834 Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Wed, 11 Sep 2024 17:52:21 +0300 Subject: [PATCH 04/10] Add more module metrics collection --- exporter/exporter.go | 8 +++++++- exporter/modules.go | 19 +++++++++++++++++-- exporter/modules_test.go | 6 ++++++ 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/exporter/exporter.go b/exporter/exporter.go index f3648958..82b305b6 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -279,6 +279,10 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { "search_bytes_collected": "search_bytes_collected", "search_total_cycles": "search_total_cycles", "search_total_ms_run": "search_total_ms_run", + "search_dialect_1": "search_dialect_1", + "search_dialect_2": "search_dialect_2", + "search_dialect_3": "search_dialect_3", + "search_dialect_4": "search_dialect_4", }, metricMapCounters: map[string]string{ @@ -433,6 +437,8 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { "stream_radix_tree_keys": {txt: `Radix tree keys count"`, lbls: []string{"db", "stream"}}, "stream_radix_tree_nodes": {txt: `Radix tree nodes count`, lbls: []string{"db", "stream"}}, "up": {txt: "Information about the Redis instance"}, + "module_info": {txt: "Information about loaded Redis module", lbls: 
[]string{"name", "ver", "api", "filters", "usedby", "using"}}, + "search_version": {txt: "Information about the RediSearch module", lbls: []string{"version"}}, } { e.metricDescriptions[k] = newMetricDescr(opts.Namespace, k, desc.txt, desc.lbls) } @@ -711,7 +717,7 @@ func (e *Exporter) scrapeRedisHost(ch chan<- prometheus.Metric) error { } if e.options.InclModulesMetrics { - e.extractModuleMetrics(ch, c) + e.extractModulesMetrics(ch, c) } if len(e.options.LuaScript) > 0 { diff --git a/exporter/modules.go b/exporter/modules.go index 965cd20a..26bfe7da 100644 --- a/exporter/modules.go +++ b/exporter/modules.go @@ -8,7 +8,7 @@ import ( log "github.com/sirupsen/logrus" ) -func (e *Exporter) extractModuleMetrics(ch chan<- prometheus.Metric, c redis.Conn) { +func (e *Exporter) extractModulesMetrics(ch chan<- prometheus.Metric, c redis.Conn) { info, err := redis.String(doRedisCmd(c, "INFO", "MODULES")) if err != nil { log.Errorf("extractSearchMetrics() err: %s", err) @@ -20,9 +20,24 @@ func (e *Exporter) extractModuleMetrics(ch chan<- prometheus.Metric, c redis.Con log.Debugf("info: %s", line) split := strings.Split(line, ":") - if len(split) != 2 { + switch { + case split[0] == "module": + module := strings.Split(split[1], ",") + e.registerConstMetricGauge(ch, "module_info", 1, + // response format: 'module:name=search,ver=21005,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors]' + strings.Split(module[0], "=")[1], + strings.Split(module[1], "=")[1], + strings.Split(module[2], "=")[1], + strings.Split(module[3], "=")[1], + strings.Split(module[4], "=")[1], + strings.Split(module[5], "=")[1], + ) + case split[0] == "search_version": + e.registerConstMetricGauge(ch, "search_version", 1, split[1]) + case len(split) != 2: continue } + fieldKey := split[0] fieldValue := split[1] diff --git a/exporter/modules_test.go b/exporter/modules_test.go index 19f67111..46ebee68 100644 --- a/exporter/modules_test.go +++ b/exporter/modules_test.go @@ -34,6 +34,8 @@ func TestModules(t *testing.T) { }() wantedMetrics := map[string]bool{ + "module_info": false, + "search_version": false, "search_number_of_indexes": false, "search_used_memory_indexes": false, "search_total_indexing_time": false, @@ -42,6 +44,10 @@ func TestModules(t *testing.T) { "search_bytes_collected": false, "search_total_cycles": false, "search_total_ms_run": false, + "search_dialect_1": false, + "search_dialect_2": false, + "search_dialect_3": false, + "search_dialect_4": false, } for m := range chM { From d2717347e028e08db87a809a7a62e19ef3768c36 Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Thu, 19 Sep 2024 16:17:40 +0200 Subject: [PATCH 05/10] Add continue after registerConstMetricGauge in module switch --- exporter/modules.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/exporter/modules.go b/exporter/modules.go index 26bfe7da..cdcdd05b 100644 --- a/exporter/modules.go +++ b/exporter/modules.go @@ -32,8 +32,10 @@ func (e *Exporter) extractModulesMetrics(ch chan<- prometheus.Metric, c redis.Co strings.Split(module[4], "=")[1], strings.Split(module[5], "=")[1], ) + continue case split[0] == "search_version": e.registerConstMetricGauge(ch, "search_version", 1, split[1]) + continue case len(split) != 2: continue } From d97d711ea34e31f57c756d264ca61939d93fee80 Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Thu, 19 Sep 2024 16:18:50 +0200 Subject: [PATCH 06/10] Add _bytes suffix to search_used_memory_indexes --- exporter/exporter.go | 2 +- exporter/modules_test.go | 28 ++++++++++++++-------------- 2 
files changed, 15 insertions(+), 15 deletions(-) diff --git a/exporter/exporter.go b/exporter/exporter.go index 82b305b6..bb22a1ea 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -272,7 +272,7 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { // Redis Modules metrics // RediSearch module "search_number_of_indexes": "search_number_of_indexes", - "search_used_memory_indexes": "search_used_memory_indexes", + "search_used_memory_indexes": "search_used_memory_indexes_bytes", "search_total_indexing_time": "search_total_indexing_time", "search_global_idle": "search_global_idle", "search_global_total": "search_global_total", diff --git a/exporter/modules_test.go b/exporter/modules_test.go index 46ebee68..8e262f40 100644 --- a/exporter/modules_test.go +++ b/exporter/modules_test.go @@ -34,20 +34,20 @@ func TestModules(t *testing.T) { }() wantedMetrics := map[string]bool{ - "module_info": false, - "search_version": false, - "search_number_of_indexes": false, - "search_used_memory_indexes": false, - "search_total_indexing_time": false, - "search_global_idle": false, - "search_global_total": false, - "search_bytes_collected": false, - "search_total_cycles": false, - "search_total_ms_run": false, - "search_dialect_1": false, - "search_dialect_2": false, - "search_dialect_3": false, - "search_dialect_4": false, + "module_info": false, + "search_version": false, + "search_number_of_indexes": false, + "search_used_memory_indexes_bytes": false, + "search_total_indexing_time": false, + "search_global_idle": false, + "search_global_total": false, + "search_bytes_collected": false, + "search_total_cycles": false, + "search_total_ms_run": false, + "search_dialect_1": false, + "search_dialect_2": false, + "search_dialect_3": false, + "search_dialect_4": false, } for m := range chM { From 8d36dd58d8683903398289c0f7d4a3840889f64b Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Sat, 21 Sep 2024 23:38:47 +0200 Subject: [PATCH 07/10] Remove search_version metric --- exporter/exporter.go | 1 - exporter/modules.go | 3 --- exporter/modules_test.go | 1 - 3 files changed, 5 deletions(-) diff --git a/exporter/exporter.go b/exporter/exporter.go index bb22a1ea..63351d1f 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -438,7 +438,6 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { "stream_radix_tree_nodes": {txt: `Radix tree nodes count`, lbls: []string{"db", "stream"}}, "up": {txt: "Information about the Redis instance"}, "module_info": {txt: "Information about loaded Redis module", lbls: []string{"name", "ver", "api", "filters", "usedby", "using"}}, - "search_version": {txt: "Information about the RediSearch module", lbls: []string{"version"}}, } { e.metricDescriptions[k] = newMetricDescr(opts.Namespace, k, desc.txt, desc.lbls) } diff --git a/exporter/modules.go b/exporter/modules.go index cdcdd05b..c46d12ed 100644 --- a/exporter/modules.go +++ b/exporter/modules.go @@ -33,9 +33,6 @@ func (e *Exporter) extractModulesMetrics(ch chan<- prometheus.Metric, c redis.Co strings.Split(module[5], "=")[1], ) continue - case split[0] == "search_version": - e.registerConstMetricGauge(ch, "search_version", 1, split[1]) - continue case len(split) != 2: continue } diff --git a/exporter/modules_test.go b/exporter/modules_test.go index 8e262f40..deb92268 100644 --- a/exporter/modules_test.go +++ b/exporter/modules_test.go @@ -35,7 +35,6 @@ func TestModules(t *testing.T) { wantedMetrics := map[string]bool{ "module_info": false, - "search_version": 
false, "search_number_of_indexes": false, "search_used_memory_indexes_bytes": false, "search_total_indexing_time": false, "search_global_idle": false, "search_global_total": false, "search_bytes_collected": false, "search_total_cycles": false, "search_total_ms_run": false, "search_dialect_1": false, "search_dialect_2": false, "search_dialect_3": false, "search_dialect_4": false, } for m := range chM { From d125fa4edb9b2e5258916346ca8092adc48f9e20 Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Sun, 22 Sep 2024 00:03:33 +0200 Subject: [PATCH 08/10] Add checks for correctness of data after split --- exporter/modules.go | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/exporter/modules.go b/exporter/modules.go index c46d12ed..3efa6d36 100644 --- a/exporter/modules.go +++ b/exporter/modules.go @@ -20,11 +20,17 @@ func (e *Exporter) extractModulesMetrics(ch chan<- prometheus.Metric, c redis.Co log.Debugf("info: %s", line) split := strings.Split(line, ":") - switch { - case split[0] == "module": + if len(split) != 2 { + continue + } + + if split[0] == "module" { + // module format: 'module:name=search,ver=21005,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors]' module := strings.Split(split[1], ",") + if len(module) != 7 { + continue + } e.registerConstMetricGauge(ch, "module_info", 1, - // response format: 'module:name=search,ver=21005,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors]' strings.Split(module[0], "=")[1], strings.Split(module[1], "=")[1], strings.Split(module[2], "=")[1], @@ -33,8 +39,6 @@ func (e *Exporter) extractModulesMetrics(ch chan<- prometheus.Metric, c redis.Co strings.Split(module[3], "=")[1], strings.Split(module[4], "=")[1], strings.Split(module[5], "=")[1], ) continue - case len(split) != 2: - continue } fieldKey := split[0] From 7baa35fb330871cb85ec1409307091729f74fb34 Mon Sep 17 00:00:00 2001 From: Nicolai Antiferov Date: Sun, 22 Sep 2024 08:21:40 +0200 Subject: [PATCH 09/10] Set units for 3 more metrics --- exporter/exporter.go | 6 +++--- exporter/modules_test.go | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/exporter/exporter.go b/exporter/exporter.go index 63351d1f..6abafbc6 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -273,12 +273,12 @@ func NewRedisExporter(redisURI string, opts Options) (*Exporter, error) { // RediSearch module "search_number_of_indexes": "search_number_of_indexes", "search_used_memory_indexes": "search_used_memory_indexes_bytes", - "search_total_indexing_time": "search_total_indexing_time", + "search_total_indexing_time": "search_total_indexing_time_ms", "search_global_idle": "search_global_idle", "search_global_total": "search_global_total", - "search_bytes_collected": "search_bytes_collected", + "search_bytes_collected": "search_collected_bytes", "search_total_cycles": "search_total_cycles", - "search_total_ms_run": "search_total_ms_run", + "search_total_ms_run": "search_total_run_ms", "search_dialect_1": "search_dialect_1", "search_dialect_2": "search_dialect_2", "search_dialect_3": "search_dialect_3", diff --git a/exporter/modules_test.go b/exporter/modules_test.go index deb92268..6e6631d8 100644 --- a/exporter/modules_test.go +++ b/exporter/modules_test.go @@ -37,12 +37,12 @@ func TestModules(t *testing.T) { "module_info": false, "search_number_of_indexes": false, "search_used_memory_indexes_bytes": false, - "search_total_indexing_time": false, + "search_total_indexing_time_ms": false, "search_global_idle": false, "search_global_total": false, - "search_bytes_collected": false, + "search_collected_bytes": false, "search_total_cycles": false, - "search_total_ms_run": false, + "search_total_run_ms": false, "search_dialect_1": false, "search_dialect_2": false, "search_dialect_3": false, "search_dialect_4": false, } for m := range chM { From 9e7822e716a6dcacf5210039fc9092a85a69d8c9 Mon Sep 17 00:00:00 2001 From:
Nicolai Antiferov Date: Mon, 30 Sep 2024 10:11:24 +0300 Subject: [PATCH 10/10] Clarify comment about module format --- exporter/modules.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/exporter/modules.go b/exporter/modules.go index 3efa6d36..2fe82a70 100644 --- a/exporter/modules.go +++ b/exporter/modules.go @@ -25,7 +25,7 @@ func (e *Exporter) extractModulesMetrics(ch chan<- prometheus.Metric, c redis.Co } if split[0] == "module" { - // module format: 'module:name=search,ver=21005,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors]' + // module format: 'module:name=,ver=21005,api=1,filters=0,usedby=[],using=[],options=[]' module := strings.Split(split[1], ",") if len(module) != 7 { continue
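
For illustration only: the module-line parsing that patches 08 and 10 converge on can be exercised as a standalone program. The hedged Go sketch below splits a sample "module:" line the way extractModulesMetrics does in the final state of this series (first on ":", then on "," with the len(module) != 7 guard, then on "=" per field). The sample line and the wrapper program are assumptions for demonstration, not output captured from a real server.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Illustrative sample in the format quoted in the patch comments, not real server output.
	line := "module:name=search,ver=21005,api=1,filters=0,usedby=[],using=[ReJSON],options=[handle-io-errors]"

	// Mirror the parsing steps used by extractModulesMetrics.
	split := strings.Split(line, ":")
	if len(split) != 2 || split[0] != "module" {
		return
	}
	module := strings.Split(split[1], ",")
	if len(module) != 7 {
		return // same length guard patch 08 adds before emitting module_info
	}
	// The values of the first six key=value pairs become the module_info gauge's label values.
	for _, field := range module[:6] {
		kv := strings.Split(field, "=")
		fmt.Printf("%s = %s\n", kv[0], kv[1])
	}
}

With the series applied, module metrics collection is opt-in: pass -include-modules-metrics or set REDIS_EXPORTER_INCL_MODULES_METRICS=true on the exporter.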