Skip to content

Commit

Permalink
Merge pull request #108 from qclaogui:docker-desktop-mixin
Browse files Browse the repository at this point in the history
Add Docker Desktop Mixin
  • Loading branch information
qclaogui authored Apr 20, 2024
2 parents 1fa2457 + da2192f commit d6e65da
Show file tree
Hide file tree
Showing 20 changed files with 4,382 additions and 59 deletions.
101 changes: 101 additions & 0 deletions alloy-modules/compose/integrations/alloy.alloy
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
/*
Module Components: component_alloy
Description: Self-monitoring for Alloy — exposes the running Alloy process's own
metrics via prometheus.exporter.self, relabels them, scrapes them, and forwards
only the metrics matching keep_metrics to the supplied receivers.
*/

declare "component_alloy" {

	/*****************************************************************
	* ARGUMENTS
	*****************************************************************/
	argument "forward_to" {
		// FIX: "MetricssReceiver" -> "MetricsReceiver" (typo)
		comment = "Must be a list(MetricsReceiver) where collected metrics should be forwarded to"
	}

	// Value for the "job" label attached to every scraped series.
	argument "job_label" {
		optional = true
		default = "integrations/alloy-check"
	}

	// Value for the "instance" label; falls back to constants.hostname when unset.
	argument "instance_name" {
		optional = true
	}

	// Regex of metric names to keep post-scrape; everything else is dropped.
	argument "keep_metrics" {
		optional = true
		default = "(prometheus_target_sync_length_seconds_sum|prometheus_target_scrapes_.*|prometheus_target_interval.*|prometheus_sd_discovered_targets|alloy_build.*|prometheus_remote_write_wal_samples_appended_total|process_start_time_seconds)"
	}

	argument "scrape_interval" {
		comment = "How often to scrape metrics from the targets (default: 60s)"
		optional = true
		default = "60s"
	}

	argument "scrape_timeout" {
		comment = "How long before a scrape times out (default: 10s)"
		optional = true
		default = "10s"
	}

	/***************************************************************
	* Integrations Alloy (self exporter)
	****************************************************************/
	// FIX: header said "Integrations cAdvisor" — copy-paste from the cadvisor
	// module; this component exports the Alloy process's own metrics.
	prometheus.exporter.self "integrations_alloy" { }

	/***************************************************************
	* Discovery Relabelings (pre-scrape)
	****************************************************************/
	discovery.relabel "integrations_alloy" {
		// FIX: targets must come from prometheus.exporter.self declared above;
		// prometheus.exporter.unix.integrations_alloy is not declared in this
		// module and the reference would fail to resolve.
		targets = prometheus.exporter.self.integrations_alloy.targets

		rule {
			target_label = "job"
			replacement  = argument.job_label.value
		}

		rule {
			target_label = "instance"
			replacement  = coalesce(argument.instance_name.value, constants.hostname)
		}

		rule {
			target_label = "alloy_hostname"
			replacement  = constants.hostname
		}
	}

	/***************************************************************
	* Prometheus Scrape Integrations Targets
	****************************************************************/
	prometheus.scrape "integrations_alloy" {
		targets = concat(
			discovery.relabel.integrations_alloy.output,
		)

		enable_protobuf_negotiation = true
		scrape_classic_histograms   = true

		scrape_interval = argument.scrape_interval.value
		scrape_timeout  = argument.scrape_timeout.value

		clustering {
			enabled = true
		}

		forward_to = [prometheus.relabel.integrations_alloy.receiver]
	}

	/***************************************************************
	* Prometheus Metric Relabelings (post-scrape)
	****************************************************************/
	prometheus.relabel "integrations_alloy" {
		forward_to = argument.forward_to.value

		// keep only metrics that match the keep_metrics regex
		rule {
			source_labels = ["__name__"]
			regex         = argument.keep_metrics.value
			action        = "keep"
		}
	}
}
49 changes: 11 additions & 38 deletions alloy-modules/compose/integrations/cadvisor.alloy
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ declare "component_cadvisor" {

argument "job_label" {
optional = true
default = "integrations/cadvisor"
default = "integrations/docker/cadvisor"
}

argument "instance_name" {
Expand Down Expand Up @@ -60,8 +60,6 @@ declare "component_cadvisor" {
allowlisted_container_labels = [
"com.docker.compose.project",
"com.docker.compose.service",
"metrics.grafana.com/job",
"prometheus.io/job",
]

// only show stats for docker containers
Expand Down Expand Up @@ -123,29 +121,17 @@ declare "component_cadvisor" {
prometheus.relabel "integrations_cadvisor" {
forward_to = argument.forward_to.value

// drop unused metric label
rule {
action = "labeldrop"
regex = "id"
}

// keep only metrics that match the keep_metrics regex
rule {
source_labels = ["__name__"]
regex = argument.keep_metrics.value
action = "keep"
}
// rule {
// action = "labeldrop"
// regex = "id"
// }

// set a default job label to be the namespace/service_name
rule {
action = "replace"
source_labels = [
"container_label_com_docker_compose_service",
]
regex = "^(?:;*)?([^;]+).*$"
replacement = argument.namespace.value + "/$1"
target_label = "job"
}
// // keep only metrics that match the keep_metrics regex
// rule {
// source_labels = ["__name__"]
// regex = argument.keep_metrics.value
// action = "keep"
// }

rule {
action = "replace"
Expand All @@ -166,18 +152,5 @@ declare "component_cadvisor" {
replacement = "$1"
target_label = "container"
}

// allow resources to declare their the job label value to use when collecting their metrics, the default value is "",
rule {
action = "replace"
source_labels = [
"container_label_metrics_agent_grafana_com_job",
"container_label_prometheus_io_job",
]
separator = ";"
regex = "^(?:;*)?([^;]+).*$"
replacement = "$1"
target_label = "job"
}
}
}
2 changes: 1 addition & 1 deletion alloy-modules/compose/integrations/memcached.alloy
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ declare "component_memcached" {

argument "job_label" {
optional = true
default = "integrations/memcached"
default = "integrations/docker/memcached"
}

argument "instance_name" {
Expand Down
4 changes: 2 additions & 2 deletions alloy-modules/compose/integrations/node-exporter.alloy
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ declare "component_node_exporter" {

argument "job_label" {
optional = true
default = "integrations/node-exporter"
default = "integrations/docker/node-exporter"
}

argument "instance_name" {
Expand All @@ -22,7 +22,7 @@ declare "component_node_exporter" {

argument "keep_metrics" {
optional = true
default = "(up|node_exporter_build_info|node_time_seconds|node_boot_time_seconds|node_load.*|node_cpu.*|node_memory.*|node_disk.*|node_filesystem.*|process_cpu_seconds_total|process_resident_memory_bytes)"
default = "(up|node_uname_info|node_exporter_build_info|node_time_seconds|node_boot_time_seconds|node_load.*|node_cpu.*|node_memory.*|node_disk.*|node_filesystem.*|process_cpu_seconds_total|process_resident_memory_bytes)"
}

argument "scrape_interval" {
Expand Down
1 change: 1 addition & 0 deletions alloy-modules/compose/logs/keep-labels.alloy
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ declare "keep_labels" {
"cluster",
"namespace",
"pod",
"instance",
"container",
"component",
"env",
Expand Down
25 changes: 12 additions & 13 deletions alloy-modules/compose/logs/labels-scrape.alloy
Original file line number Diff line number Diff line change
Expand Up @@ -20,16 +20,21 @@ declare "labels_scrape" {
comment = "Must be a list(LogsReceiver) where collected logs should be forwarded to"
}

argument "job_label" {
optional = true
default = "integrations/docker/cadvisor"
}

argument "instance_name" {
optional = true
}

argument "label_prefix" {
comment = "The label_prefix to use Auto-Scraping (default: logs.grafana.com)"
default = "logs.grafana.com"
optional = true
}

/*
Hidden Arguments
These arguments are used to set reusable variables to avoid repeating logic
*/
argument "__sd_label" {
optional = true
comment = "The logic is used to transform the label_prefix argument into a valid label name by removing unsupported characters."
Expand Down Expand Up @@ -102,14 +107,14 @@ declare "labels_scrape" {
rule {
action = "replace"
source_labels = ["__meta_docker_container_id"]
replacement = "integrations/docker-desktop"
replacement = argument.job_label.value
target_label = "job"
}

rule {
action = "replace"
source_labels = ["__meta_docker_container_id"]
replacement = constants.hostname
replacement = coalesce(argument.instance_name.value, constants.hostname)
target_label = "instance"
}

Expand Down Expand Up @@ -161,13 +166,6 @@ declare "labels_scrape" {
target_label = "namespace"
}

// set a source label
rule {
action = "replace"
replacement = "docker"
target_label = "source"
}

rule {
replacement = "docker"
target_label = "tmp_container_runtime"
Expand All @@ -179,6 +177,7 @@ declare "labels_scrape" {
targets = discovery.relabel.label_logs_filter.output
relabel_rules = discovery.relabel.label_logs_filter.rules

labels = {"source" = "docker"}
forward_to = [loki.process.parse.receiver]
}

Expand Down
5 changes: 1 addition & 4 deletions alloy-modules/compose/metrics/integrations-scrape.alloy
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ declare "integrations_scrape" {
integrate.component_cadvisor "default" {
forward_to = argument.forward_to.value

job_label = "integrations/docker-desktop"
scrape_interval = coalesce(argument.scrape_interval.value, "60s")
scrape_timeout = coalesce(argument.scrape_timeout.value, "10s")
}
Expand All @@ -51,7 +50,6 @@ declare "integrations_scrape" {
integrate.component_node_exporter "default" {
forward_to = argument.forward_to.value

job_label = "integrations/docker-desktop"
scrape_interval = coalesce(argument.scrape_interval.value, "60s")
scrape_timeout = coalesce(argument.scrape_timeout.value, "10s")
}
Expand All @@ -62,11 +60,10 @@ declare "integrations_scrape" {
integrate.component_memcached "primary" {
forward_to = argument.forward_to.value

job_label = "integrations/docker/memcached"
instance_name = "primary"
memcached_address = "memcached:11211"
memcached_timeout = "5s"

instance_name = "primary"
scrape_interval = coalesce(argument.scrape_interval.value, "60s")
scrape_timeout = coalesce(argument.scrape_timeout.value, "10s")
}
Expand Down
2 changes: 1 addition & 1 deletion docker-compose/common/compose-include/mimir.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ services:
- -config.expand-env=true
healthcheck:
test: [ "CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:8080/ready || exit 1" ]
interval: 2s
interval: 3s
timeout: 1s
retries: 15
# expose 38080 port so we can directly access mimir inside container
Expand Down
65 changes: 65 additions & 0 deletions monitoring-mixins/docker-mixin/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
.DEFAULT_GOAL := help

include ../../.bingo/Variables.mk

# jsonnetfmt invocation with the repository's house style
JSONNET_FMT := $(JSONNETFMT) -n 2 --max-blank-lines 2 --string-style s --comment-style s
# path to the mixin output directory
MIXIN_OUT_PATH := deploy

# FIX: use := so the value is computed once at parse time instead of forking a
# shell on every expansion; $(CURDIR) replaces $(shell pwd).
CURRENT_DIR := $(notdir $(CURDIR))

.PHONY: fmt
fmt: ## Format the mixin files
	@find . -type f -name '*.libsonnet' | xargs -n 1 -- $(JSONNET_FMT) -i

.PHONY: update
update: $(JB) ## update mixin
	$(JB) update

# build_mixin_func(<out-dir>): regenerate alerts, rules and dashboards into the
# given output directory, preserving kustomization.yaml and the committed
# prometheus-alerts.yaml / prometheus-rules.yaml files.
define build_mixin_func
	$(eval $@_MIXIN_OUTPUT = $(1))
	@mkdir -p "${$@_MIXIN_OUTPUT}"; \
	find "${$@_MIXIN_OUTPUT}" ! -name "kustomization.yaml" ! -name "prometheus-alerts.yaml" ! -name "prometheus-rules.yaml" -type f -delete
	@$(MIXTOOL) generate all --output-alerts "${$@_MIXIN_OUTPUT}/alerts.yaml" --output-rules "${$@_MIXIN_OUTPUT}/rules.yaml" --directory "${$@_MIXIN_OUTPUT}/dashboards_out" "${$@_MIXIN_OUTPUT}.libsonnet"
	@mv ${$@_MIXIN_OUTPUT}/alerts.yaml ${$@_MIXIN_OUTPUT}/${CURRENT_DIR}-alerts.yaml
	@mv ${$@_MIXIN_OUTPUT}/rules.yaml ${$@_MIXIN_OUTPUT}/${CURRENT_DIR}-rules.yaml
endef


.PHONY: build
build: $(MIXTOOL) ## Generates the mixin files
	$(foreach mixin,$(MIXIN_OUT_PATH),$(call build_mixin_func, $(mixin)))

.PHONY: check
check: $(MIXTOOL) build fmt ## Build, fmt and check the mixin files
	@../../tools/find-diff-or-untracked.sh . "$(MIXIN_OUT_PATH)" || (echo "Please build and fmt mixin by running 'make build fmt'" && false)

##@ Dashboards & k8s

.PHONY: manifests
manifests: $(KUSTOMIZE) build ## Generates dashboards for k8s
	$(KUSTOMIZE) build deploy > deploy/manifests/k8s-all-in-one.yaml

# NOTE: dashboards_out has been deprecated, please use build instead
# FIX: declared .PHONY — the recipe writes into deploy/, not to a file named
# after the target, so a stray file called "dashboards_out" would otherwise
# silently disable this target.
.PHONY: dashboards_out
dashboards_out:
	@mkdir -p deploy/dashboards_out deploy/manifests
	jsonnet -J vendor -e '(import "mixin.libsonnet").grafanaDashboards' -m deploy/dashboards_out

# FIX: these targets produce deploy/$@, not $@, so they are phony by nature.
.PHONY: prom_alerts.yaml prom_rules.yaml
prom_alerts.yaml:
	jsonnet -J vendor -S -e 'std.manifestYamlDoc((import "mixin.libsonnet").prometheusAlerts)' > deploy/$@

prom_rules.yaml:
	jsonnet -J vendor -S -e 'std.manifestYamlDoc((import "mixin.libsonnet").prometheusRules)' > deploy/$@

##@ General

.PHONY: help
help: ## Display this help. Thanks to https://www.thapaliya.com/en/writings/well-documented-makefiles/
ifeq ($(OS),Windows_NT)
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make <target>\n"} /^[a-zA-Z_-]+:.*?##/ { printf "  %-40s %s\n", $$1, $$2 } /^##@/ { printf "\n%s\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
else
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf "  \033[36m%-40s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
endif
10 changes: 10 additions & 0 deletions monitoring-mixins/docker-mixin/deploy.libsonnet
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// Deployment-time configuration layered on top of the base mixin.
// `mixin { ... }` is Jsonnet's object-apply sugar for `mixin + { ... }`,
// so this is semantically identical to importing and adding inline.
local mixin = import 'mixin.libsonnet';

mixin {
  // Config overrides
  _config+:: {
    dashboardTags: ['docker'],
    dashboardPeriod: 'now-1h',
    dashboardTimezone: 'default',
    dashboardRefresh: '1m',
    enableLokiLogs: true,
  },
}
Loading

0 comments on commit d6e65da

Please sign in to comment.