From 7f2d6ae87b928ff1f28716d2ba062e479d7d2f62 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gy=C3=B6rgy=20Krajcsovits?=
Date: Fri, 7 Jul 2023 16:43:02 +0200
Subject: [PATCH] Update convertBucketsLayout and its unit tests
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add a new parameter that will be used to tell the function how many
buckets to merge when converting the bucket layouts. Not used yet.
Add disabled test cases.

Signed-off-by: György Krajcsovits
---
 .../prometheusremotewrite/histograms.go      |   8 +-
 .../prometheusremotewrite/histograms_test.go | 331 +++++++++++++++---
 2 files changed, 294 insertions(+), 45 deletions(-)

diff --git a/pkg/translator/prometheusremotewrite/histograms.go b/pkg/translator/prometheusremotewrite/histograms.go
index 293d318a3c58..34ce65729191 100644
--- a/pkg/translator/prometheusremotewrite/histograms.go
+++ b/pkg/translator/prometheusremotewrite/histograms.go
@@ -65,8 +65,8 @@ func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prom
 		// TODO: downscale to 8 if scale > 8
 	}
 
-	pSpans, pDeltas := convertBucketsLayout(p.Positive())
-	nSpans, nDeltas := convertBucketsLayout(p.Negative())
+	pSpans, pDeltas := convertBucketsLayout(p.Positive(), 1)
+	nSpans, nDeltas := convertBucketsLayout(p.Negative(), 1)
 
 	h := prompb.Histogram{
 		Schema: scale,
@@ -104,7 +104,9 @@ func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prom
 // The bucket indexes conversion was adjusted, since OTel exp. histogram bucket
 // index 0 corresponds to the range (1, base] while Prometheus bucket index 0
 // to the range (base^-1, 1].
-func convertBucketsLayout(buckets pmetric.ExponentialHistogramDataPointBuckets) ([]prompb.BucketSpan, []int64) {
+//
+// scaleMerge is the number of buckets to merge into a single bucket - must be a power of 2.
+func convertBucketsLayout(buckets pmetric.ExponentialHistogramDataPointBuckets, _ int32) ([]prompb.BucketSpan, []int64) {
 	bucketCounts := buckets.BucketCounts()
 	if bucketCounts.Len() == 0 {
 		return nil, nil
diff --git a/pkg/translator/prometheusremotewrite/histograms_test.go b/pkg/translator/prometheusremotewrite/histograms_test.go
index 6020d1ffda5b..2cdf73811009 100644
--- a/pkg/translator/prometheusremotewrite/histograms_test.go
+++ b/pkg/translator/prometheusremotewrite/histograms_test.go
@@ -4,6 +4,7 @@
 package prometheusremotewrite
 
 import (
+	"fmt"
 	"testing"
 	"time"
 
@@ -17,12 +18,16 @@ import (
 	prometheustranslator "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/prometheus"
 )
 
+type expectedBucketLayout struct {
+	wantSpans  []prompb.BucketSpan
+	wantDeltas []int64
+}
+
 func TestConvertBucketsLayout(t *testing.T) {
 	tests := []struct {
 		name       string
 		buckets    func() pmetric.ExponentialHistogramDataPointBuckets
-		wantSpans  []prompb.BucketSpan
-		wantDeltas []int64
+		wantLayout map[int32]expectedBucketLayout
 	}{
 		{
 			name: "zero offset",
@@ -32,13 +37,75 @@ func TestConvertBucketsLayout(t *testing.T) {
 				b.BucketCounts().FromRaw([]uint64{4, 3, 2, 1})
 				return b
 			},
-			wantSpans: []prompb.BucketSpan{
-				{
-					Offset: 1,
-					Length: 4,
-				},
-			},
-			wantDeltas: []int64{4, -1, -1, -1},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 1,
+							Length: 4,
+						},
+					},
+					wantDeltas: []int64{4, -1, -1, -1},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 1,
+							Length: 2,
+						},
+					},
+					// 4+3, 2+1 = 7, 3 =delta= 7, -4
+					wantDeltas: []int64{7, -4},
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 1,
+							Length: 1,
+						},
+					},
+					// 4+3+2+1 = 10 =delta= 10
+					wantDeltas: []int64{10},
+				},
+			},
+		},
+		{
+			name: "offset 1",
+			buckets: func() pmetric.ExponentialHistogramDataPointBuckets {
+				b := pmetric.NewExponentialHistogramDataPointBuckets()
+				b.SetOffset(1)
+				b.BucketCounts().FromRaw([]uint64{4, 3, 2, 1})
+				return b
+			},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 2,
+							Length: 4,
+						},
+					},
+					wantDeltas: []int64{4, -1, -1, -1},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 1,
+							Length: 3,
+						},
+					},
+					wantDeltas: []int64{4, 1, -4}, // 0+4, 3+2, 1+0 = 4, 5, 1
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 1,
+							Length: 2,
+						},
+					},
+					wantDeltas: []int64{9, -8}, // 0+4+3+2, 1+0+0+0 = 9, 1
+				},
+			},
 		},
 		{
 			name: "positive offset",
@@ -48,17 +115,103 @@ func TestConvertBucketsLayout(t *testing.T) {
 				b.BucketCounts().FromRaw([]uint64{4, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1})
 				return b
 			},
-			wantSpans: []prompb.BucketSpan{
-				{
-					Offset: 5,
-					Length: 4,
-				},
-				{
-					Offset: 12,
-					Length: 1,
-				},
-			},
-			wantDeltas: []int64{4, -2, -2, 2, -1},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 5,
+							Length: 4,
+						},
+						{
+							Offset: 12,
+							Length: 1,
+						},
+					},
+					wantDeltas: []int64{4, -2, -2, 2, -1},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 3,
+							Length: 2,
+						},
+						{
+							Offset: 6,
+							Length: 1,
+						},
+					},
+					// Downscale:
+					// 4+2, 0+2, 0+0, 0+0, 0+0, 0+0, 0+0, 0+0, 1+0 = 6, 2, 0, 0, 0, 0, 0, 0, 1
+					wantDeltas: []int64{6, -4, -1},
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 2,
+							Length: 1,
+						},
+						{
+							Offset: 3,
+							Length: 1,
+						},
+					},
+					// Downscale:
+					// 4+2+0+2, 0+0+0+0, 0+0+0+0, 0+0+0+0, 1+0+0+0 = 8, 0, 0, 0, 1
+					// Check by scaling from the previous level: 6+2, 0+0, 0+0, 0+0, 1+0 = 8, 0, 0, 0, 1
+					wantDeltas: []int64{8, -7},
+				},
+			},
+		},
+		{
+			name: "scaledown merges spans",
+			buckets: func() pmetric.ExponentialHistogramDataPointBuckets {
+				b := pmetric.NewExponentialHistogramDataPointBuckets()
+				b.SetOffset(4)
+				b.BucketCounts().FromRaw([]uint64{4, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1})
+				return b
+			},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 5,
+							Length: 4,
+						},
+						{
+							Offset: 8,
+							Length: 1,
+						},
+					},
+					wantDeltas: []int64{4, -2, -2, 2, -1},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 3,
+							Length: 2,
+						},
+						{
+							Offset: 4,
+							Length: 1,
+						},
+					},
+					// Downscale:
+					// 4+2, 0+2, 0+0, 0+0, 0+0, 0+0, 1+0 = 6, 2, 0, 0, 0, 0, 1
+					wantDeltas: []int64{6, -4, -1},
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 2,
+							Length: 4,
+						},
+					},
+					// Downscale:
+					// 4+2+0+2, 0+0+0+0, 0+0+0+0, 1+0+0+0 = 8, 0, 0, 1
+					// Check by scaling from the previous level: 6+2, 0+0, 0+0, 1+0 = 8, 0, 0, 1
+					wantDeltas: []int64{8, -8, 0, 1},
+				},
+			},
 		},
 		{
 			name: "negative offset",
@@ -68,17 +221,43 @@ func TestConvertBucketsLayout(t *testing.T) {
 				b.BucketCounts().FromRaw([]uint64{3, 1, 0, 0, 0, 1})
 				return b
 			},
-			wantSpans: []prompb.BucketSpan{
-				{
-					Offset: -1,
-					Length: 2,
-				},
-				{
-					Offset: 3,
-					Length: 1,
-				},
-			},
-			wantDeltas: []int64{3, -2, 0},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: -1,
+							Length: 2,
+						},
+						{
+							Offset: 3,
+							Length: 1,
+						},
+					},
+					wantDeltas: []int64{3, -2, 0},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 0,
+							Length: 3,
+						},
+					},
+					// Downscale:
+					// 3+1, 0+0, 0+1 = 4, 0, 1
+					wantDeltas: []int64{4, -4, 1},
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 0,
+							Length: 2,
+						},
+					},
+					// Downscale:
+					// 0+0+3+1, 0+0+0+1 = 4, 1
+					wantDeltas: []int64{4, -3},
+				},
+			},
 		},
 		{
 			name: "buckets with gaps of size 1",
@@ -88,13 +267,39 @@ func TestConvertBucketsLayout(t *testing.T) {
 				b.BucketCounts().FromRaw([]uint64{3, 1, 0, 1, 0, 1})
 				return b
 			},
-			wantSpans: []prompb.BucketSpan{
-				{
-					Offset: -1,
-					Length: 6,
-				},
-			},
-			wantDeltas: []int64{3, -2, -1, 1, -1, 1},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: -1,
+							Length: 6,
+						},
+					},
+					wantDeltas: []int64{3, -2, -1, 1, -1, 1},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 0,
+							Length: 3,
+						},
+					},
+					// Downscale:
+					// 3+1, 0+1, 0+1 = 4, 1, 1
+					wantDeltas: []int64{4, -3, 0},
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 0,
+							Length: 2,
+						},
+					},
+					// Downscale:
+					// 0+0+3+1, 0+1+0+1 = 4, 2
+					wantDeltas: []int64{4, -2},
+				},
+			},
 		},
 		{
 			name: "buckets with gaps of size 2",
@@ -104,27 +309,69 @@ func TestConvertBucketsLayout(t *testing.T) {
 				b.BucketCounts().FromRaw([]uint64{3, 0, 0, 1, 0, 0, 1})
 				return b
 			},
-			wantSpans: []prompb.BucketSpan{
-				{
-					Offset: -1,
-					Length: 7,
-				},
-			},
-			wantDeltas: []int64{3, -3, 0, 1, -1, 0, 1},
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: -1,
+							Length: 7,
+						},
+					},
+					wantDeltas: []int64{3, -3, 0, 1, -1, 0, 1},
+				},
+				2: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 0,
+							Length: 4,
+						},
+					},
+					// Downscale:
+					// 3+0, 0+1, 0+0, 0+1 = 3, 1, 0, 1
+					wantDeltas: []int64{3, -2, -1, 1},
+				},
+				4: {
+					wantSpans: []prompb.BucketSpan{
+						{
+							Offset: 0,
+							Length: 3,
+						},
+					},
+					// Downscale:
+					// 0+0+3+0, 0+1+0+0, 1+0+0+0 = 3, 1, 1
+					wantDeltas: []int64{3, -2, 0},
+				},
+			},
 		},
 		{
-			name:       "zero buckets",
-			buckets:    pmetric.NewExponentialHistogramDataPointBuckets,
-			wantSpans:  nil,
-			wantDeltas: nil,
+			name:       "zero buckets",
+			buckets:    pmetric.NewExponentialHistogramDataPointBuckets,
+			wantLayout: map[int32]expectedBucketLayout{
+				1: {
+					wantSpans:  nil,
+					wantDeltas: nil,
+				},
+				2: {
+					wantSpans:  nil,
+					wantDeltas: nil,
+				},
+				4: {
+					wantSpans:  nil,
+					wantDeltas: nil,
+				},
+			},
 		},
 	}
 	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			gotSpans, gotDeltas := convertBucketsLayout(tt.buckets())
-			assert.Equal(t, tt.wantSpans, gotSpans)
-			assert.Equal(t, tt.wantDeltas, gotDeltas)
-		})
+		for scaleDown, wantLayout := range tt.wantLayout {
+			if scaleDown == 1 {
+				t.Run(fmt.Sprintf("%s-scaleby-%d", tt.name, scaleDown), func(t *testing.T) {
+					gotSpans, gotDeltas := convertBucketsLayout(tt.buckets(), scaleDown)
+					assert.Equal(t, wantLayout.wantSpans, gotSpans)
+					assert.Equal(t, wantLayout.wantDeltas, gotDeltas)
+				})
+			}
+		}
 	}
 }
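Note for reviewers: the new parameter is intentionally ignored in this commit, so the merge-by-2 and merge-by-4 expectations above are exercised only by the disabled subtests. The sketch below is not part of the patch and the helper name downscaledIndex is illustrative only; it shows the index arithmetic those expectations assume: adjacent OTel exponential-histogram buckets are merged in power-of-2 groups aligned to absolute index 0, and the merged index is shifted by +1 to obtain the Prometheus native-histogram bucket index.

package main

import "fmt"

// downscaledIndex is an illustrative helper, not part of this patch: it maps
// an OTel exponential histogram bucket index to the Prometheus native
// histogram bucket index after merging `merge` adjacent OTel buckets, where
// merge must be a power of 2. OTel bucket i covers (base^i, base^(i+1)] while
// Prometheus bucket i covers (base^(i-1), base^i], hence the final +1.
func downscaledIndex(otelIdx, merge int32) int32 {
	// Floor division so that negative indexes group correctly as well.
	d := otelIdx / merge
	if otelIdx%merge != 0 && otelIdx < 0 {
		d--
	}
	return d + 1
}

func main() {
	// Mirrors the "zero offset" case: counts 4, 3, 2, 1 at OTel indexes 0..3,
	// merged two buckets at a time.
	counts := []int64{4, 3, 2, 1}
	merged := map[int32]int64{}
	for i, c := range counts {
		merged[downscaledIndex(int32(i), 2)] += c
	}
	fmt.Println(merged) // map[1:7 2:3], i.e. deltas 7, -4 as in the test comment
}

Running the sketch prints map[1:7 2:3], matching the "4+3, 2+1 = 7, 3" grouping in the zero-offset test comment; applying the same mapping to the bucket offset is what the downscaled span offsets in the test expectations assume (for example, an OTel offset of -2 lands on Prometheus bucket 0 when merging pairs).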