From 3a29d0fd2eea8cfa46c139d58cbdc3ef722f84c2 Mon Sep 17 00:00:00 2001
From: Martin Majlis <122797378+martin-majlis-s1@users.noreply.github.com>
Date: Tue, 28 Nov 2023 15:57:59 +0100
Subject: [PATCH] [datasetexporter]: Upgrade library to 0.17.0 (#29446)
Upgrade to new version of the library.
This PR implements the following issues:
* #27650 - metrics are now collected via OpenTelemetry, so they can be
monitored. It's a better version of the previous PR #27487, which was not
working.
* #27652 - whether the `session_key` is included is now configurable with
the `debug` option
Another change is that fields that are specified as part of the `group_by`
configuration are now transferred as part of the session info.
**Link to tracking Issue:** #27650, #27652
**Testing:**
1. Build docker image - make docker-otelcontribcol
2. Checkout https://github.com/open-telemetry/opentelemetry-demo
3. Update configuration in `docker-compose.yaml` and in the
`src/otelcollector/otelcol-config.yml`:
* In `docker-compose.yaml` switch image to the newly built one from step 1
* In `docker-compose.yaml` enable feature gate for collecting metrics -
`--feature-gates=telemetry.useOtelForInternalMetrics`
* In `src/otelcollector/otelcol-config.yml` enable metrics scraping by
prometheus
* In `src/otelcollector/otelcol-config.yml` add configuration for
dataset
```diff
diff --git a/docker-compose.yml b/docker-compose.yml
index 001f7c8..d7edd0d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -646,14 +646,16 @@ services:
# OpenTelemetry Collector
otelcol:
- image: otel/opentelemetry-collector-contrib:0.86.0
+ image: otelcontribcol:latest
container_name: otel-col
deploy:
resources:
limits:
memory: 125M
restart: unless-stopped
- command: [ "--config=/etc/otelcol-config.yml", "--config=/etc/otelcol-config-extras.yml" ]
+ command: [ "--config=/etc/otelcol-config.yml", "--config=/etc/otelcol-config-extras.yml", "--feature-gates=telemetry.useOtelForInternalMetrics" ]
volumes:
- ./src/otelcollector/otelcol-config.yml:/etc/otelcol-config.yml
- ./src/otelcollector/otelcol-config-extras.yml:/etc/otelcol-config-extras.yml
diff --git a/src/otelcollector/otelcol-config.yml b/src/otelcollector/otelcol-config.yml
index f2568ae..9944562 100644
--- a/src/otelcollector/otelcol-config.yml
+++ b/src/otelcollector/otelcol-config.yml
@@ -15,6 +15,14 @@ receivers:
targets:
- endpoint: http://frontendproxy:${env:ENVOY_PORT}
+ prometheus:
+ config:
+ scrape_configs:
+ - job_name: 'otel-collector'
+ scrape_interval: 5s
+ static_configs:
+ - targets: ['0.0.0.0:8888']
+
exporters:
debug:
otlp:
@@ -29,6 +37,22 @@ exporters:
endpoint: "http://prometheus:9090/api/v1/otlp"
tls:
insecure: true
+ logging:
+ dataset:
+ api_key: API_KEY
+ dataset_url: https://SERVER.scalyr.com
+ debug: true
+ buffer:
+ group_by:
+ - resource_name
+ - resource_type
+ logs:
+ export_resource_info_on_event: true
+ server_host:
+ server_host: Martin
+ use_hostname: false
+ dataset/aaa:
+ api_key: API_KEY
+ dataset_url: https://SERVER.scalyr.com
+ debug: true
+ buffer:
+ group_by:
+ - resource_name
+ - resource_type
+ logs:
+ export_resource_info_on_event: true
+ server_host:
+ server_host: MartinAAA
+ use_hostname: false
processors:
batch:
@@ -47,6 +71,11 @@ processors:
- set(description, "") where name == "queueSize"
# FIXME: remove when this issue is resolved: https://github.com/open-telemetry/opentelemetry-python-contrib/issues/1958
- set(description, "") where name == "http.client.duration"
+ attributes:
+ actions:
+ - key: otel.demo
+ value: 29446
+ action: upsert
connectors:
spanmetrics:
@@ -55,13 +84,13 @@ service:
pipelines:
traces:
receivers: [otlp]
- processors: [batch]
- exporters: [otlp, debug, spanmetrics]
+ processors: [batch, attributes]
+ exporters: [otlp, debug, spanmetrics, dataset, dataset/aaa]
metrics:
- receivers: [httpcheck/frontendproxy, otlp, spanmetrics]
+ receivers: [httpcheck/frontendproxy, otlp, spanmetrics, prometheus]
processors: [filter/ottl, transform, batch]
exporters: [otlphttp/prometheus, debug]
logs:
receivers: [otlp]
- processors: [batch]
- exporters: [otlp/logs, debug]
+ processors: [batch, attributes]
+ exporters: [otlp/logs, debug, dataset, dataset/aaa]
```
4. Run the demo - `docker compose up --abort-on-container-exit`
5. Check that metrics are in Grafana -
http://localhost:8080/grafana/explore?
6. Check some metrics
![Screenshot 2023-11-22 at 14 06
56](https://github.com/open-telemetry/opentelemetry-collector-contrib/assets/122797378/81306486-eb5e-49b1-87ed-25d1eb8afcf8)
7. Check that data are available in DataSet ![Screenshot 2023-11-22 at
13 33
50](https://github.com/open-telemetry/opentelemetry-collector-contrib/assets/122797378/77cb2f31-be14-463b-91a7-fd10f8dbfe3a)
**Documentation:**
**Library changes:**
* Group By & Debug - https://github.com/scalyr/dataset-go/pull/62
* Metrics - https://github.com/scalyr/dataset-go/pull/61
---------
Co-authored-by: Andrzej Stencel
---
.../datasetexporter-update-to-0.16.0.yaml | 27 ++
cmd/configschema/go.mod | 2 +-
cmd/configschema/go.sum | 4 +-
cmd/otelcontribcol/go.mod | 2 +-
cmd/otelcontribcol/go.sum | 4 +-
exporter/datasetexporter/README.md | 43 ++-
exporter/datasetexporter/config.go | 8 +-
exporter/datasetexporter/config_test.go | 3 +-
exporter/datasetexporter/datasetexporter.go | 15 +-
exporter/datasetexporter/factory.go | 1 +
exporter/datasetexporter/factory_test.go | 3 +-
exporter/datasetexporter/go.mod | 3 +-
exporter/datasetexporter/go.sum | 6 +-
exporter/datasetexporter/logs_exporter.go | 2 +-
.../datasetexporter/logs_exporter_test.go | 329 +++++++++++-------
exporter/datasetexporter/testdata/config.yaml | 1 +
exporter/datasetexporter/traces_exporter.go | 4 +-
go.mod | 2 +-
go.sum | 4 +-
19 files changed, 301 insertions(+), 162 deletions(-)
create mode 100755 .chloggen/datasetexporter-update-to-0.16.0.yaml
diff --git a/.chloggen/datasetexporter-update-to-0.16.0.yaml b/.chloggen/datasetexporter-update-to-0.16.0.yaml
new file mode 100755
index 000000000000..5b4053cc98cc
--- /dev/null
+++ b/.chloggen/datasetexporter-update-to-0.16.0.yaml
@@ -0,0 +1,27 @@
+# Use this changelog template to create an entry for release notes.
+
+# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
+change_type: enhancement
+
+# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
+component: datasetexporter
+
+# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
+note: Collect usage metrics with Otel and send grouped attributes in session info.
+
+# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
+issues: [27650, 27652]
+
+# (Optional) One or more lines of additional information to render under the primary note.
+# These lines will be padded with 2 spaces and then inserted directly into the document.
+# Use pipe (|) for multiline entries.
+subtext:
+
+# If your change doesn't affect end users or the exported elements of any package,
+# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
+# Optional: The change log or logs in which this entry should be included.
+# e.g. '[user]' or '[user, api]'
+# Include 'user' if the change is relevant to end users.
+# Include 'api' if there is a change to a library API.
+# Default: '[user]'
+change_logs: []
diff --git a/cmd/configschema/go.mod b/cmd/configschema/go.mod
index 7009dbf85942..e9150ca493cf 100644
--- a/cmd/configschema/go.mod
+++ b/cmd/configschema/go.mod
@@ -572,7 +572,7 @@ require (
github.com/sagikazarmark/locafero v0.3.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21 // indirect
- github.com/scalyr/dataset-go v0.14.0 // indirect
+ github.com/scalyr/dataset-go v0.17.0 // indirect
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.7.0 // indirect
github.com/segmentio/asm v1.2.0 // indirect
diff --git a/cmd/configschema/go.sum b/cmd/configschema/go.sum
index b0f1905920e1..fe1cfc71f49a 100644
--- a/cmd/configschema/go.sum
+++ b/cmd/configschema/go.sum
@@ -1399,8 +1399,8 @@ github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21 h1:yWfiTPwYxB0l5fGMhl/G+liULugVIHD9AU77iNLrURQ=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21/go.mod h1:fCa7OJZ/9DRTnOKmxvT6pn+LPWUptQAmHF/SBJUGEcg=
-github.com/scalyr/dataset-go v0.14.0 h1:uRkoUt6LcMcH3VUTjOakQ4aq+1ooJB2t47oqYRUkV/k=
-github.com/scalyr/dataset-go v0.14.0/go.mod h1:+a4BvVyS7mKquK7ySuWh4vygyDBREQrdikdcnABYKFw=
+github.com/scalyr/dataset-go v0.17.0 h1:5YI/VlbLHr4Ui6SegWm0yjZYioypWB68U7nFQBKNOn8=
+github.com/scalyr/dataset-go v0.17.0/go.mod h1:ehHlPsZSgFWxOkud1eKwmKd5bLF9LcUFrU01XuCnh+8=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 h1:RpforrEYXWkmGwJHIGnLZ3tTWStkjVVstwzNGqxX2Ds=
diff --git a/cmd/otelcontribcol/go.mod b/cmd/otelcontribcol/go.mod
index 94504af54ade..18b4dafbc343 100644
--- a/cmd/otelcontribcol/go.mod
+++ b/cmd/otelcontribcol/go.mod
@@ -591,7 +591,7 @@ require (
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/samber/lo v1.38.1 // indirect
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21 // indirect
- github.com/scalyr/dataset-go v0.14.0 // indirect
+ github.com/scalyr/dataset-go v0.17.0 // indirect
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.7.0 // indirect
github.com/segmentio/asm v1.2.0 // indirect
diff --git a/cmd/otelcontribcol/go.sum b/cmd/otelcontribcol/go.sum
index 9e0dc2f3caa3..66dd00a1df8b 100644
--- a/cmd/otelcontribcol/go.sum
+++ b/cmd/otelcontribcol/go.sum
@@ -1395,8 +1395,8 @@ github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21 h1:yWfiTPwYxB0l5fGMhl/G+liULugVIHD9AU77iNLrURQ=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21/go.mod h1:fCa7OJZ/9DRTnOKmxvT6pn+LPWUptQAmHF/SBJUGEcg=
-github.com/scalyr/dataset-go v0.14.0 h1:uRkoUt6LcMcH3VUTjOakQ4aq+1ooJB2t47oqYRUkV/k=
-github.com/scalyr/dataset-go v0.14.0/go.mod h1:+a4BvVyS7mKquK7ySuWh4vygyDBREQrdikdcnABYKFw=
+github.com/scalyr/dataset-go v0.17.0 h1:5YI/VlbLHr4Ui6SegWm0yjZYioypWB68U7nFQBKNOn8=
+github.com/scalyr/dataset-go v0.17.0/go.mod h1:ehHlPsZSgFWxOkud1eKwmKd5bLF9LcUFrU01XuCnh+8=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 h1:RpforrEYXWkmGwJHIGnLZ3tTWStkjVVstwzNGqxX2Ds=
diff --git a/exporter/datasetexporter/README.md b/exporter/datasetexporter/README.md
index 8ac481490d15..e2db8f31ff06 100644
--- a/exporter/datasetexporter/README.md
+++ b/exporter/datasetexporter/README.md
@@ -45,9 +45,10 @@ Make sure to provide the appropriate server host value in the `serverHost` attri
### Optional Settings
+- `debug` (default = false): Adds `session_key` to the server fields. It's useful for debugging throughput issues.
- `buffer`:
- `max_lifetime` (default = 5s): The maximum delay between sending batches from the same source.
- - `group_by` (default = []): The list of attributes based on which events should be grouped.
+ - `group_by` (default = []): The list of attributes based on which events should be grouped. They are moved from the event attributes to the session info and shown as server fields in the UI.
- `retry_initial_interval` (default = 5s): Time to wait after the first failure before retrying.
- `retry_max_interval` (default = 30s): Is the upper bound on backoff.
- `retry_max_elapsed_time` (default = 300s): Is the maximum amount of time spent trying to send a buffer.
@@ -259,8 +260,6 @@ service:
exporters: [dataset/traces]
```
-## Examples
-
### Handling `serverHost` Attribute
Based on the given configuration and scenarios, here's the expected behavior:
@@ -280,3 +279,41 @@ Based on the given configuration and scenarios, here's the expected behavior:
5. Resource: `{}`, Log: `{'attribute.foo': 'Bar'}`, Env: `SERVER_HOST=''`, Hostname: `ip-172-31-27-19`
* Since the attribute `container_id` is not set and the environmental variable `SERVER_HOST` is empty, the `hostname` of the node (`ip-172-31-27-19`) will be used as the fallback value for `serverHost`.
* Used `serverHost` will be `ip-172-31-27-19`.
+
+## Metrics
+
+To enable metrics you have to:
+1. Run collector with enabled feature gate `telemetry.useOtelForInternalMetrics`. This can be done by executing it with one additional parameter - `--feature-gates=telemetry.useOtelForInternalMetrics`.
+2. Enable metrics scraping as part of the configuration and add receiver into services:
+ ```yaml
+ receivers:
+ prometheus:
+ config:
+ scrape_configs:
+ - job_name: 'otel-collector'
+ scrape_interval: 5s
+ static_configs:
+ - targets: ['0.0.0.0:8888']
+ ...
+ service:
+ pipelines:
+ metrics:
+ # add prometheus among metrics receivers
+ receivers: [prometheus]
+ processors: [batch]
+ exporters: [otlphttp/prometheus, debug]
+ ```
+
+### Available Metrics
+
+Available metrics contain `dataset` in their name. There are counters related to the
+number of processed events (`events`), buffers (`buffer`), and transferred bytes (`bytes`).
+There are also histograms related to response times (`responseTime`) and payload size (`payloadSize`).
+
+There are several counters related to events/buffers:
+* `enqueued` - the number of received entities
+* `processed` - the number of entities that were accepted by the next layer
+* `dropped` - the number of entities that were not accepted by the next layer
+* `broken` - the number of entities that were somehow corrupted during processing (should be 0)
+
+The number of entities, that are still in the queue can be computed as `enqueued - (processed + dropped + broken)`.
diff --git a/exporter/datasetexporter/config.go b/exporter/datasetexporter/config.go
index c497086434be..4db73a648bd1 100644
--- a/exporter/datasetexporter/config.go
+++ b/exporter/datasetexporter/config.go
@@ -144,9 +144,12 @@ func newDefaultServerHostSettings() ServerHostSettings {
}
}
+const debugDefault = false
+
type Config struct {
DatasetURL string `mapstructure:"dataset_url"`
APIKey configopaque.String `mapstructure:"api_key"`
+ Debug bool `mapstructure:"debug"`
BufferSettings `mapstructure:"buffer"`
TracesSettings `mapstructure:"traces"`
LogsSettings `mapstructure:"logs"`
@@ -180,8 +183,11 @@ func (c *Config) Validate() error {
// String returns a string representation of the Config object.
// It includes all the fields and their values in the format "field_name: field_value".
func (c *Config) String() string {
+ apiKey, _ := c.APIKey.MarshalText()
s := ""
s += fmt.Sprintf("%s: %s; ", "DatasetURL", c.DatasetURL)
+ s += fmt.Sprintf("%s: %s (%d); ", "APIKey", apiKey, len(c.APIKey))
+ s += fmt.Sprintf("%s: %t; ", "Debug", c.Debug)
s += fmt.Sprintf("%s: %+v; ", "BufferSettings", c.BufferSettings)
s += fmt.Sprintf("%s: %+v; ", "LogsSettings", c.LogsSettings)
s += fmt.Sprintf("%s: %+v; ", "TracesSettings", c.TracesSettings)
@@ -189,7 +195,6 @@ func (c *Config) String() string {
s += fmt.Sprintf("%s: %+v; ", "RetrySettings", c.RetrySettings)
s += fmt.Sprintf("%s: %+v; ", "QueueSettings", c.QueueSettings)
s += fmt.Sprintf("%s: %+v", "TimeoutSettings", c.TimeoutSettings)
-
return s
}
@@ -218,6 +223,7 @@ func (c *Config) convert() (*ExporterConfig, error) {
UseHostName: c.ServerHostSettings.UseHostName,
ServerHost: c.ServerHostSettings.ServerHost,
},
+ Debug: c.Debug,
},
tracesSettings: c.TracesSettings,
logsSettings: c.LogsSettings,
diff --git a/exporter/datasetexporter/config_test.go b/exporter/datasetexporter/config_test.go
index af99f0cf7f93..3a019974b039 100644
--- a/exporter/datasetexporter/config_test.go
+++ b/exporter/datasetexporter/config_test.go
@@ -108,6 +108,7 @@ func TestConfigString(t *testing.T) {
config := Config{
DatasetURL: "https://example.com",
APIKey: "secret",
+ Debug: true,
BufferSettings: BufferSettings{
MaxLifetime: 123,
GroupBy: []string{"field1", "field2"},
@@ -140,7 +141,7 @@ func TestConfigString(t *testing.T) {
}
assert.Equal(t,
- "DatasetURL: https://example.com; BufferSettings: {MaxLifetime:123ns GroupBy:[field1 field2] RetryInitialInterval:0s RetryMaxInterval:0s RetryMaxElapsedTime:0s RetryShutdownTimeout:0s}; LogsSettings: {ExportResourceInfo:true ExportResourcePrefix:AAA ExportScopeInfo:true ExportScopePrefix:BBB DecomposeComplexMessageField:true DecomposedComplexMessagePrefix:EEE exportSettings:{ExportSeparator:CCC ExportDistinguishingSuffix:DDD}}; TracesSettings: {exportSettings:{ExportSeparator:TTT ExportDistinguishingSuffix:UUU}}; ServerHostSettings: {UseHostName:false ServerHost:foo-bar}; RetrySettings: {Enabled:true InitialInterval:5s RandomizationFactor:0.5 Multiplier:1.5 MaxInterval:30s MaxElapsedTime:5m0s}; QueueSettings: {Enabled:true NumConsumers:10 QueueSize:1000 StorageID:}; TimeoutSettings: {Timeout:5s}",
+ "DatasetURL: https://example.com; APIKey: [REDACTED] (6); Debug: true; BufferSettings: {MaxLifetime:123ns GroupBy:[field1 field2] RetryInitialInterval:0s RetryMaxInterval:0s RetryMaxElapsedTime:0s RetryShutdownTimeout:0s}; LogsSettings: {ExportResourceInfo:true ExportResourcePrefix:AAA ExportScopeInfo:true ExportScopePrefix:BBB DecomposeComplexMessageField:true DecomposedComplexMessagePrefix:EEE exportSettings:{ExportSeparator:CCC ExportDistinguishingSuffix:DDD}}; TracesSettings: {exportSettings:{ExportSeparator:TTT ExportDistinguishingSuffix:UUU}}; ServerHostSettings: {UseHostName:false ServerHost:foo-bar}; RetrySettings: {Enabled:true InitialInterval:5s RandomizationFactor:0.5 Multiplier:1.5 MaxInterval:30s MaxElapsedTime:5m0s}; QueueSettings: {Enabled:true NumConsumers:10 QueueSize:1000 StorageID:}; TimeoutSettings: {Timeout:5s}",
config.String(),
)
}
diff --git a/exporter/datasetexporter/datasetexporter.go b/exporter/datasetexporter/datasetexporter.go
index 4767c588d4c4..70c962730b14 100644
--- a/exporter/datasetexporter/datasetexporter.go
+++ b/exporter/datasetexporter/datasetexporter.go
@@ -14,15 +14,14 @@ import (
"github.com/google/uuid"
"github.com/scalyr/dataset-go/pkg/api/add_events"
"github.com/scalyr/dataset-go/pkg/client"
+ "github.com/scalyr/dataset-go/pkg/meter_config"
"go.opentelemetry.io/collector/exporter"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.uber.org/zap"
- "golang.org/x/time/rate"
)
type DatasetExporter struct {
client *client.DataSetClient
- limiter *rate.Limiter
logger *zap.Logger
session string
exporterCfg *ExporterConfig
@@ -34,6 +33,8 @@ func newDatasetExporter(entity string, config *Config, set exporter.CreateSettin
logger.Info("Creating new DataSetExporter",
zap.String("config", config.String()),
zap.String("entity", entity),
+ zap.String("id.string", set.ID.String()),
+ zap.String("id.name", set.ID.Name()),
)
exporterCfg, err := config.convert()
if err != nil {
@@ -48,11 +49,20 @@ func newDatasetExporter(entity string, config *Config, set exporter.CreateSettin
set.BuildInfo.Version,
entity,
)
+
+ meter := set.MeterProvider.Meter("datasetexporter")
+ meterConfig := meter_config.NewMeterConfig(
+ &meter,
+ entity,
+ set.ID.Name(),
+ )
+
client, err := client.NewClient(
exporterCfg.datasetConfig,
&http.Client{Timeout: time.Second * 60},
logger,
&userAgent,
+ meterConfig,
)
if err != nil {
logger.Error("Cannot create DataSetClient: ", zap.Error(err))
@@ -61,7 +71,6 @@ func newDatasetExporter(entity string, config *Config, set exporter.CreateSettin
return &DatasetExporter{
client: client,
- limiter: rate.NewLimiter(100*rate.Every(1*time.Minute), 100), // 100 requests / minute
session: uuid.New().String(),
logger: logger,
exporterCfg: exporterCfg,
diff --git a/exporter/datasetexporter/factory.go b/exporter/datasetexporter/factory.go
index 600cc0f1fa83..5864f52cfc75 100644
--- a/exporter/datasetexporter/factory.go
+++ b/exporter/datasetexporter/factory.go
@@ -32,6 +32,7 @@ func createDefaultConfig() component.Config {
RetrySettings: exporterhelper.NewDefaultRetrySettings(),
QueueSettings: exporterhelper.NewDefaultQueueSettings(),
TimeoutSettings: exporterhelper.NewDefaultTimeoutSettings(),
+ Debug: debugDefault,
}
}
diff --git a/exporter/datasetexporter/factory_test.go b/exporter/datasetexporter/factory_test.go
index 2e3d3f0dab65..bda8aa484cdc 100644
--- a/exporter/datasetexporter/factory_test.go
+++ b/exporter/datasetexporter/factory_test.go
@@ -84,6 +84,7 @@ func TestLoadConfig(t *testing.T) {
expected: &Config{
DatasetURL: "https://app.scalyr.com",
APIKey: "key-full",
+ Debug: true,
BufferSettings: BufferSettings{
MaxLifetime: 3456 * time.Millisecond,
GroupBy: []string{"body.map.kubernetes.pod_id", "body.map.kubernetes.docker_id", "body.map.stream"},
@@ -160,7 +161,7 @@ func createExporterTests() []CreateTest {
{
name: "broken",
config: &Config{},
- expectedError: fmt.Errorf("cannot get DataSetExpoter: cannot convert config: DatasetURL: ; BufferSettings: {MaxLifetime:0s GroupBy:[] RetryInitialInterval:0s RetryMaxInterval:0s RetryMaxElapsedTime:0s RetryShutdownTimeout:0s}; LogsSettings: {ExportResourceInfo:false ExportResourcePrefix: ExportScopeInfo:false ExportScopePrefix: DecomposeComplexMessageField:false DecomposedComplexMessagePrefix: exportSettings:{ExportSeparator: ExportDistinguishingSuffix:}}; TracesSettings: {exportSettings:{ExportSeparator: ExportDistinguishingSuffix:}}; ServerHostSettings: {UseHostName:false ServerHost:}; RetrySettings: {Enabled:false InitialInterval:0s RandomizationFactor:0 Multiplier:0 MaxInterval:0s MaxElapsedTime:0s}; QueueSettings: {Enabled:false NumConsumers:0 QueueSize:0 StorageID:}; TimeoutSettings: {Timeout:0s}; config is not valid: api_key is required"),
+ expectedError: fmt.Errorf("cannot get DataSetExporter: cannot convert config: DatasetURL: ; APIKey: [REDACTED] (0); Debug: false; BufferSettings: {MaxLifetime:0s GroupBy:[] RetryInitialInterval:0s RetryMaxInterval:0s RetryMaxElapsedTime:0s RetryShutdownTimeout:0s}; LogsSettings: {ExportResourceInfo:false ExportResourcePrefix: ExportScopeInfo:false ExportScopePrefix: DecomposeComplexMessageField:false DecomposedComplexMessagePrefix: exportSettings:{ExportSeparator: ExportDistinguishingSuffix:}}; TracesSettings: {exportSettings:{ExportSeparator: ExportDistinguishingSuffix:}}; ServerHostSettings: {UseHostName:false ServerHost:}; RetrySettings: {Enabled:false InitialInterval:0s RandomizationFactor:0 Multiplier:0 MaxInterval:0s MaxElapsedTime:0s}; QueueSettings: {Enabled:false NumConsumers:0 QueueSize:0 StorageID:}; TimeoutSettings: {Timeout:0s}; config is not valid: api_key is required"),
},
{
name: "valid",
diff --git a/exporter/datasetexporter/go.mod b/exporter/datasetexporter/go.mod
index c84ad508f83e..58a21fde069a 100644
--- a/exporter/datasetexporter/go.mod
+++ b/exporter/datasetexporter/go.mod
@@ -6,14 +6,13 @@ require (
github.com/google/uuid v1.4.0
// github.com/open-telemetry/opentelemetry-collector-contrib/extension/storage/filestorage v0.77.0
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.89.0
- github.com/scalyr/dataset-go v0.14.0
+ github.com/scalyr/dataset-go v0.17.0
github.com/stretchr/testify v1.8.4
go.opentelemetry.io/collector/component v0.89.1-0.20231127181443-575c5f5e2531
go.opentelemetry.io/collector/confmap v0.89.1-0.20231127181443-575c5f5e2531
go.opentelemetry.io/collector/exporter v0.89.1-0.20231127181443-575c5f5e2531
go.opentelemetry.io/collector/pdata v1.0.0-rcv0018.0.20231127181443-575c5f5e2531
go.uber.org/zap v1.26.0
- golang.org/x/time v0.4.0
)
diff --git a/exporter/datasetexporter/go.sum b/exporter/datasetexporter/go.sum
index 7087a91a8afd..7b81348d84fb 100644
--- a/exporter/datasetexporter/go.sum
+++ b/exporter/datasetexporter/go.sum
@@ -87,8 +87,8 @@ github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lne
github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI=
github.com/prometheus/statsd_exporter v0.22.7 h1:7Pji/i2GuhK6Lu7DHrtTkFmNBCudCPT1pX2CziuyQR0=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
-github.com/scalyr/dataset-go v0.14.0 h1:uRkoUt6LcMcH3VUTjOakQ4aq+1ooJB2t47oqYRUkV/k=
-github.com/scalyr/dataset-go v0.14.0/go.mod h1:+a4BvVyS7mKquK7ySuWh4vygyDBREQrdikdcnABYKFw=
+github.com/scalyr/dataset-go v0.17.0 h1:5YI/VlbLHr4Ui6SegWm0yjZYioypWB68U7nFQBKNOn8=
+github.com/scalyr/dataset-go v0.17.0/go.mod h1:ehHlPsZSgFWxOkud1eKwmKd5bLF9LcUFrU01XuCnh+8=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@@ -176,8 +176,6 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/time v0.4.0 h1:Z81tqI5ddIoXDPvVQ7/7CC9TnLM7ubaFG2qXYd5BbYY=
-golang.org/x/time v0.4.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
diff --git a/exporter/datasetexporter/logs_exporter.go b/exporter/datasetexporter/logs_exporter.go
index 8ff1a98892e1..43dec4ab96e5 100644
--- a/exporter/datasetexporter/logs_exporter.go
+++ b/exporter/datasetexporter/logs_exporter.go
@@ -43,7 +43,7 @@ func createLogsExporter(ctx context.Context, set exporter.CreateSettings, config
cfg := castConfig(config)
e, err := newDatasetExporter("logs", cfg, set)
if err != nil {
- return nil, fmt.Errorf("cannot get DataSetExpoter: %w", err)
+ return nil, fmt.Errorf("cannot get DataSetExporter: %w", err)
}
return exporterhelper.NewLogsExporter(
diff --git a/exporter/datasetexporter/logs_exporter_test.go b/exporter/datasetexporter/logs_exporter_test.go
index 49eb45b47740..57b2ad699af0 100644
--- a/exporter/datasetexporter/logs_exporter_test.go
+++ b/exporter/datasetexporter/logs_exporter_test.go
@@ -11,7 +11,9 @@ import (
"io"
"net/http"
"net/http/httptest"
+ "sort"
"strconv"
+ "sync"
"sync/atomic"
"testing"
"time"
@@ -780,12 +782,15 @@ func TestConsumeLogsShouldSucceed(t *testing.T) {
attempt := atomic.Uint64{}
wasSuccessful := atomic.Bool{}
- addRequest := add_events.AddEventsRequest{}
+ addRequests := []add_events.AddEventsRequest{}
+ lock := sync.Mutex{}
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
attempt.Add(1)
cer, err := extract(req)
- addRequest = cer
+ lock.Lock()
+ addRequests = append(addRequests, cer)
+ lock.Unlock()
assert.NoError(t, err, "Error reading request: %v", err)
@@ -804,8 +809,9 @@ func TestConsumeLogsShouldSucceed(t *testing.T) {
config := &Config{
DatasetURL: server.URL,
APIKey: "key-lib",
+ Debug: true,
BufferSettings: BufferSettings{
- MaxLifetime: 500 * time.Millisecond,
+ MaxLifetime: 2 * time.Second,
GroupBy: []string{"attributes.container_id"},
RetryInitialInterval: time.Second,
RetryMaxInterval: time.Minute,
@@ -881,151 +887,204 @@ func TestConsumeLogsShouldSucceed(t *testing.T) {
}
assert.True(t, wasSuccessful.Load())
+ assert.Equal(t, uint64(4), attempt.Load())
+
+ sort.SliceStable(addRequests, func(i, j int) bool {
+ if addRequests[i].Session == addRequests[j].Session {
+ return len(addRequests[i].Events) < len(addRequests[j].Events)
+ }
+ return addRequests[i].Session < addRequests[j].Session
+ })
+
assert.Equal(t,
- add_events.AddEventsRequest{
- AuthParams: request.AuthParams{
- Token: "key-lib",
- },
- AddEventsRequestParams: add_events.AddEventsRequestParams{
- Session: addRequest.Session,
- SessionInfo: addRequest.SessionInfo,
- Events: []*add_events.Event{
- {
- Thread: testLEventReq.Thread,
- Log: testLEventReq.Log,
- Sev: testLEventReq.Sev,
- Ts: testLEventReq.Ts,
- Attrs: map[string]any{
- add_events.AttrOrigServerHost: testServerHost,
- "app": "server",
- "instance_num": float64(1),
- "dropped_attributes_count": float64(1),
- "message": "This is a log message",
- "span_id": "0102040800000000",
- "trace_id": "08040201000000000000000000000000",
- "bundle_key": "d41d8cd98f00b204e9800998ecf8427e",
-
- "R#resource-attr": "resource-attr-val-1",
- },
+ []add_events.AddEventsRequest{
+ {
+ AuthParams: request.AuthParams{
+ Token: "key-lib",
+ },
+ AddEventsRequestParams: add_events.AddEventsRequestParams{
+ Session: addRequests[0].Session,
+ SessionInfo: &add_events.SessionInfo{
+ add_events.AttrServerHost: "serverHostFromAttribute",
+ add_events.AttrSessionKey: "0296b9a57cb379df0f35aaf2d23500d3",
},
- {
- Thread: testLEventReq.Thread,
- Log: testLEventReq.Log,
- Sev: testLEventReq.Sev,
- Ts: testLEventReq.Ts,
- Attrs: map[string]any{
- add_events.AttrOrigServerHost: "serverHostFromAttribute",
- "app": "server",
- "instance_num": float64(1),
- "dropped_attributes_count": float64(1),
- "message": "This is a log message",
- "span_id": "0102040800000000",
- "trace_id": "08040201000000000000000000000000",
- "bundle_key": "d41d8cd98f00b204e9800998ecf8427e",
-
- "R#resource-attr": "resource-attr-val-1",
- "R#serverHost": "serverHostFromResource",
+ Events: []*add_events.Event{
+ {
+ Thread: testLEventReq.Thread,
+ Log: testLEventReq.Log,
+ Sev: testLEventReq.Sev,
+ Ts: testLEventReq.Ts,
+ Attrs: map[string]any{
+ "app": "server",
+ "instance_num": float64(1),
+ "dropped_attributes_count": float64(1),
+ "message": "This is a log message",
+ "span_id": "0102040800000000",
+ "trace_id": "08040201000000000000000000000000",
+
+ "R#resource-attr": "resource-attr-val-1",
+ "R#serverHost": "serverHostFromResource",
+ },
+ ServerHost: "",
},
},
- {
- Thread: testLEventReq.Thread,
- Log: testLEventReq.Log,
- Sev: testLEventReq.Sev,
- Ts: testLEventReq.Ts,
- Attrs: map[string]any{
- add_events.AttrOrigServerHost: "serverHostFromResourceServer",
- "app": "server",
- "instance_num": float64(1),
- "dropped_attributes_count": float64(1),
- "message": "This is a log message",
- "span_id": "0102040800000000",
- "trace_id": "08040201000000000000000000000000",
- "bundle_key": "d41d8cd98f00b204e9800998ecf8427e",
-
- "R#resource-attr": "resource-attr-val-1",
- "R#host.name": "serverHostFromResourceHost",
- "R#serverHost": "serverHostFromResourceServer",
+ Threads: []*add_events.Thread{testLThread},
+ Logs: []*add_events.Log{testLLog},
+ },
+ },
+ {
+ AuthParams: request.AuthParams{
+ Token: "key-lib",
+ },
+ AddEventsRequestParams: add_events.AddEventsRequestParams{
+ Session: addRequests[1].Session,
+ SessionInfo: &add_events.SessionInfo{
+ add_events.AttrServerHost: "serverHostFromResourceHost",
+ add_events.AttrSessionKey: "73b97897d80d89c9a09a3ee6ed178650",
+ },
+ Events: []*add_events.Event{
+ {
+ Thread: testLEventReq.Thread,
+ Log: testLEventReq.Log,
+ Sev: testLEventReq.Sev,
+ Ts: testLEventReq.Ts,
+ Attrs: map[string]any{
+ "app": "server",
+ "instance_num": float64(1),
+ "dropped_attributes_count": float64(1),
+ "message": "This is a log message",
+ "span_id": "0102040800000000",
+ "trace_id": "08040201000000000000000000000000",
+
+ "R#resource-attr": "resource-attr-val-1",
+ "R#host.name": "serverHostFromResourceHost",
+ },
},
},
- {
- Thread: testLEventReq.Thread,
- Log: testLEventReq.Log,
- Sev: testLEventReq.Sev,
- Ts: testLEventReq.Ts,
- Attrs: map[string]any{
- add_events.AttrOrigServerHost: "serverHostFromResourceHost",
- "app": "server",
- "instance_num": float64(1),
- "dropped_attributes_count": float64(1),
- "message": "This is a log message",
- "span_id": "0102040800000000",
- "trace_id": "08040201000000000000000000000000",
- "bundle_key": "d41d8cd98f00b204e9800998ecf8427e",
-
- "R#resource-attr": "resource-attr-val-1",
- "R#host.name": "serverHostFromResourceHost",
+ Threads: []*add_events.Thread{testLThread},
+ Logs: []*add_events.Log{testLLog},
+ },
+ },
+ {
+ AuthParams: request.AuthParams{
+ Token: "key-lib",
+ },
+ AddEventsRequestParams: add_events.AddEventsRequestParams{
+ Session: addRequests[2].Session,
+ SessionInfo: &add_events.SessionInfo{
+ add_events.AttrServerHost: "serverHostFromResourceServer",
+ add_events.AttrSessionKey: "770e22b433d2e9a31fa9a81abf3b9b87",
+ },
+ Events: []*add_events.Event{
+ {
+ Thread: testLEventReq.Thread,
+ Log: testLEventReq.Log,
+ Sev: testLEventReq.Sev,
+ Ts: testLEventReq.Ts,
+ Attrs: map[string]any{
+ "app": "server",
+ "instance_num": float64(1),
+ "dropped_attributes_count": float64(1),
+ "message": "This is a log message",
+ "span_id": "0102040800000000",
+ "trace_id": "08040201000000000000000000000000",
+
+ "R#resource-attr": "resource-attr-val-1",
+ "R#host.name": "serverHostFromResourceHost",
+ "R#serverHost": "serverHostFromResourceServer",
+ },
},
},
- {
- Thread: testLEventReq.Thread,
- Log: testLEventReq.Log,
- Sev: testLEventReq.Sev,
- Ts: testLEventReq.Ts,
- Attrs: map[string]any{
- add_events.AttrOrigServerHost: testServerHost,
- "app": "server",
- "instance_num": float64(1),
- "dropped_attributes_count": float64(1),
- "message": "This is a log message",
- "span_id": "0102040800000000",
- "trace_id": "08040201000000000000000000000000",
- "bundle_key": "d41d8cd98f00b204e9800998ecf8427e",
-
- "string": "stringA",
- "double": 2.0,
- "bool": true,
- "empty": nil,
- "int": float64(3),
- "map#map_empty": nil,
- "map#map_string": "map_stringA",
- "map#map_map#map_map_string": "map_map_stringA",
- "slice#0": "slice_stringA",
- "name": "filled_nameA",
- "span_id_": "filled_span_idA",
-
- "S#string": "stringS",
- "S#double": 2.0,
- "S#bool": true,
- "S#empty": nil,
- "S#int": float64(3),
- "S#map#map_empty": nil,
- "S#map#map_string": "map_stringS",
- "S#map#map_map#map_map_string": "map_map_stringS",
- "S#slice#0": "slice_stringS",
- "S#name": "filled_nameS",
- "S#span_id": "filled_span_idS",
-
- "R#string": "stringR",
- "R#double": 2.0,
- "R#bool": true,
- "R#empty": nil,
- "R#int": float64(3),
- "R#map#map_empty": nil,
- "R#map#map_string": "map_stringR",
- "R#map#map_map#map_map_string": "map_map_stringR",
- "R#slice#0": "slice_stringR",
- "R#name": "filled_nameR",
- "R#span_id": "filled_span_idR",
-
- "R#resource-attr": "resource-attr-val-1",
+ Threads: []*add_events.Thread{testLThread},
+ Logs: []*add_events.Log{testLLog},
+ },
+ },
+ {
+ AuthParams: request.AuthParams{
+ Token: "key-lib",
+ },
+ AddEventsRequestParams: add_events.AddEventsRequestParams{
+ Session: addRequests[3].Session,
+ SessionInfo: &add_events.SessionInfo{
+ add_events.AttrServerHost: "foo",
+ add_events.AttrSessionKey: "caedd419dc354c24a69aac7508890ec1",
+ },
+ Events: []*add_events.Event{
+ {
+ Thread: testLEventReq.Thread,
+ Log: testLEventReq.Log,
+ Sev: testLEventReq.Sev,
+ Ts: testLEventReq.Ts,
+ Attrs: map[string]any{
+ "app": "server",
+ "instance_num": float64(1),
+ "dropped_attributes_count": float64(1),
+ "message": "This is a log message",
+ "span_id": "0102040800000000",
+ "trace_id": "08040201000000000000000000000000",
+
+ "R#resource-attr": "resource-attr-val-1",
+ },
+ },
+ {
+ Thread: testLEventReq.Thread,
+ Log: testLEventReq.Log,
+ Sev: testLEventReq.Sev,
+ Ts: testLEventReq.Ts,
+ Attrs: map[string]any{
+ "app": "server",
+ "instance_num": float64(1),
+ "dropped_attributes_count": float64(1),
+ "message": "This is a log message",
+ "span_id": "0102040800000000",
+ "trace_id": "08040201000000000000000000000000",
+
+ "string": "stringA",
+ "double": 2.0,
+ "bool": true,
+ "empty": nil,
+ "int": float64(3),
+ "map#map_empty": nil,
+ "map#map_string": "map_stringA",
+ "map#map_map#map_map_string": "map_map_stringA",
+ "slice#0": "slice_stringA",
+ "name": "filled_nameA",
+ "span_id_": "filled_span_idA",
+
+ "S#string": "stringS",
+ "S#double": 2.0,
+ "S#bool": true,
+ "S#empty": nil,
+ "S#int": float64(3),
+ "S#map#map_empty": nil,
+ "S#map#map_string": "map_stringS",
+ "S#map#map_map#map_map_string": "map_map_stringS",
+ "S#slice#0": "slice_stringS",
+ "S#name": "filled_nameS",
+ "S#span_id": "filled_span_idS",
+
+ "R#string": "stringR",
+ "R#double": 2.0,
+ "R#bool": true,
+ "R#empty": nil,
+ "R#int": float64(3),
+ "R#map#map_empty": nil,
+ "R#map#map_string": "map_stringR",
+ "R#map#map_map#map_map_string": "map_map_stringR",
+ "R#slice#0": "slice_stringR",
+ "R#name": "filled_nameR",
+ "R#span_id": "filled_span_idR",
+
+ "R#resource-attr": "resource-attr-val-1",
+ },
},
},
+ Threads: []*add_events.Thread{testLThread},
+ Logs: []*add_events.Log{testLLog},
},
- Threads: []*add_events.Thread{testLThread},
- Logs: []*add_events.Log{testLLog},
},
},
- addRequest,
+ addRequests,
)
}
diff --git a/exporter/datasetexporter/testdata/config.yaml b/exporter/datasetexporter/testdata/config.yaml
index 9ffe4d29a733..262597ba4bf4 100644
--- a/exporter/datasetexporter/testdata/config.yaml
+++ b/exporter/datasetexporter/testdata/config.yaml
@@ -14,6 +14,7 @@ dataset/lib:
dataset/full:
dataset_url: https://app.scalyr.com
api_key: key-full
+ debug: true
buffer:
max_lifetime: 3456ms
group_by:
diff --git a/exporter/datasetexporter/traces_exporter.go b/exporter/datasetexporter/traces_exporter.go
index 5ee1024e591c..ea352b7e02f5 100644
--- a/exporter/datasetexporter/traces_exporter.go
+++ b/exporter/datasetexporter/traces_exporter.go
@@ -21,9 +21,9 @@ const ServiceNameKey = "service.name"
func createTracesExporter(ctx context.Context, set exporter.CreateSettings, config component.Config) (exporter.Traces, error) {
cfg := castConfig(config)
- e, err := newDatasetExporter("logs", cfg, set)
+ e, err := newDatasetExporter("traces", cfg, set)
if err != nil {
- return nil, fmt.Errorf("cannot get DataSetExpoter: %w", err)
+ return nil, fmt.Errorf("cannot get DataSetExporter: %w", err)
}
return exporterhelper.NewTracesExporter(
diff --git a/go.mod b/go.mod
index 957494dc3d82..86785fed15a4 100644
--- a/go.mod
+++ b/go.mod
@@ -575,7 +575,7 @@ require (
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
github.com/samber/lo v1.38.1 // indirect
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21 // indirect
- github.com/scalyr/dataset-go v0.14.0 // indirect
+ github.com/scalyr/dataset-go v0.17.0 // indirect
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.7.0 // indirect
github.com/segmentio/asm v1.2.0 // indirect
diff --git a/go.sum b/go.sum
index 0626101ec3db..6d8e416c6ab2 100644
--- a/go.sum
+++ b/go.sum
@@ -1404,8 +1404,8 @@ github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21 h1:yWfiTPwYxB0l5fGMhl/G+liULugVIHD9AU77iNLrURQ=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.21/go.mod h1:fCa7OJZ/9DRTnOKmxvT6pn+LPWUptQAmHF/SBJUGEcg=
-github.com/scalyr/dataset-go v0.14.0 h1:uRkoUt6LcMcH3VUTjOakQ4aq+1ooJB2t47oqYRUkV/k=
-github.com/scalyr/dataset-go v0.14.0/go.mod h1:+a4BvVyS7mKquK7ySuWh4vygyDBREQrdikdcnABYKFw=
+github.com/scalyr/dataset-go v0.17.0 h1:5YI/VlbLHr4Ui6SegWm0yjZYioypWB68U7nFQBKNOn8=
+github.com/scalyr/dataset-go v0.17.0/go.mod h1:ehHlPsZSgFWxOkud1eKwmKd5bLF9LcUFrU01XuCnh+8=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 h1:RpforrEYXWkmGwJHIGnLZ3tTWStkjVVstwzNGqxX2Ds=