Add some tests & test infra.
Seefooo committed Sep 27, 2022
1 parent 30ecfab commit 82979a5
Showing 3 changed files with 210 additions and 62 deletions.
@@ -16,6 +16,8 @@
import re
from typing import Dict, Sequence

from collections import defaultdict
from itertools import chain
import requests
import snappy

@@ -30,10 +32,12 @@
from opentelemetry.sdk.metrics.export import (
    MetricExporter,
    MetricExportResult,
    AggregationTemporality,
    Gauge,
    Sum,
    Histogram,
    MetricsData,
    Metric,
)
#from opentelemetry.sdk.metrics.export.aggregate import (
# HistogramAggregator,
@@ -162,8 +166,8 @@ def headers(self, headers: Dict):
        self._headers = headers

    def export(
-        self, export_records: Sequence[ExportRecord]
-    ) -> MetricsExportResult:
+        self, export_records
+    ) -> MetricExportResult:
        if not export_records:
            return MetricExportResult.SUCCESS
        timeseries = self._convert_to_timeseries(export_records)
@@ -181,9 +185,82 @@ def shutdown(self) -> None:

    def _translate_data(self, data: MetricsData):
        rw_timeseries = []

        for resource_metrics in data.resource_metrics:
            resource = resource_metrics.resource
            # OTLP Data model suggests combining some attrs into job/instance
            # Should we do that here?
            resource_labels = self._get_resource_labels(resource.attributes)
            # Scope name/version probably not too useful from a labeling perspective
            for scope_metrics in resource_metrics.scope_metrics:
                for metric in scope_metrics.metrics:
                    rw_timeseries.extend(self._parse_metric(metric, resource_labels))
        return rw_timeseries

    def _get_resource_labels(self, attrs):
        """Converts Resource Attributes to Prometheus Labels based on
        OTLP Metric Data Model's recommendations on Resource Attributes
        """
        return [(n, str(v)) for n, v in attrs.items()]
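A quick illustration with made-up attribute values (not part of the diff): every value is passed through str() so it can serve as a Prometheus label value.

resource_attrs = {"service.name": "my-svc", "replica": 3}
labels = [(n, str(v)) for n, v in resource_attrs.items()]
assert labels == [("service.name", "my-svc"), ("replica", "3")]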

    def _parse_metric(
        self, metric: Metric, resource_labels: Sequence
    ) -> Sequence[TimeSeries]:
        """
        Parses the Metric & lower objects, then converts the output into
        OM TimeSeries. Returns a List of TimeSeries objects based on one Metric.
        """
        # Datapoints have attributes associated with them. These would be sent
        # to RW as different metrics: name & labels is a unique time series.
        sample_sets = defaultdict(list)
        if isinstance(metric.data, (Gauge, Sum)):
            for dp in metric.data.data_points:
                attrs, sample = self._parse_data_point(dp)
                sample_sets[attrs].append(sample)
        elif isinstance(metric.data, Histogram):
            raise NotImplementedError("Coming soon!")
        else:
            logger.warning("Unsupported Metric Type: %s", type(metric.data))
            return []

        # Create the metric name; it becomes the __name__ label below
        if metric.unit:
            # Prometheus naming guidelines add the unit to the name
            name = f"{metric.name}_{metric.unit}"
        else:
            name = metric.name

        timeseries = []
        for labels, samples in sample_sets.items():
            ts = TimeSeries()
            ts.labels.append(self._label("__name__", name))
            for label_name, label_value in chain(resource_labels, labels):
                # Previous implementation did not str() the names...
                ts.labels.append(self._label(label_name, str(label_value)))
            for value, timestamp in samples:
                ts.samples.append(self._sample(value, timestamp))
            timeseries.append(ts)
        return timeseries
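A minimal sketch of the grouping step above, using hypothetical data points: points that share an attribute set land in one bucket (one TimeSeries), while a distinct attribute set yields a second series.

from collections import defaultdict

points = [  # (attrs, (value, timestamp_ms)) pairs, as _parse_data_point returns
    ((("path", "/"),), (1.0, 1641946016139)),
    ((("path", "/login"),), (2.0, 1641946016139)),
    ((("path", "/"),), (3.0, 1641946017139)),
]
sample_sets = defaultdict(list)
for attrs, sample in points:
    sample_sets[attrs].append(sample)
assert len(sample_sets) == 2                    # two unique label sets
assert len(sample_sets[(("path", "/"),)]) == 2  # the "/" series has two samples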

    def _sample(self, value, timestamp: int):
        sample = Sample()
        sample.value = value
        sample.timestamp = timestamp
        return sample

    def _label(self, name: str, value: str):
        label = Label()
        label.name = name
        label.value = value
        return label

    def _parse_data_point(self, data_point):
        attrs = tuple(data_point.attributes.items())
        # TODO: Optimize? Create Sample here
        # Remote write time is in milliseconds
        sample = (data_point.value, (data_point.time_unix_nano // 1_000_000))
        return attrs, sample
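OTLP timestamps are nanoseconds while remote write expects milliseconds, hence the floor division; a worked example:

time_unix_nano = 1641946016139533244                  # OTLP: nanoseconds
assert time_unix_nano // 1_000_000 == 1641946016139   # remote write: milliseconds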

    def _convert_to_timeseries(
-        self, export_records: Sequence[ExportRecord]
+        self, export_records
    ) -> Sequence[TimeSeries]:
        timeseries = []
        for export_record in export_records:
@@ -199,7 +276,7 @@ def _convert_to_timeseries(
        return timeseries

    def _convert_from_sum(
-        self, sum_record: ExportRecord
+        self, sum_record
    ) -> Sequence[TimeSeries]:
        return [
            self._create_timeseries(
@@ -211,22 +288,9 @@ def _convert_from_sum(

    def _convert_from_gauge(self, gauge_record):
        raise NotImplementedError("Do this")

-    def _convert_from_min_max_sum_count(
-        self, min_max_sum_count_record: ExportRecord
-    ) -> Sequence[TimeSeries]:
-        timeseries = []
-        for agg_type in ["min", "max", "sum", "count"]:
-            name = min_max_sum_count_record.instrument.name + "_" + agg_type
-            value = getattr(
-                min_max_sum_count_record.aggregator.checkpoint, agg_type
-            )
-            timeseries.append(
-                self._create_timeseries(min_max_sum_count_record, name, value)
-            )
-        return timeseries

    def _convert_from_histogram(
-        self, histogram_record: ExportRecord
+        self, histogram_record
    ) -> Sequence[TimeSeries]:
        timeseries = []
        for bound in histogram_record.aggregator.checkpoint.keys():
@@ -242,43 +306,10 @@
            )
        return timeseries

-    def _convert_from_last_value(
-        self, last_value_record: ExportRecord
-    ) -> Sequence[TimeSeries]:
-        return [
-            self._create_timeseries(
-                last_value_record,
-                last_value_record.instrument.name + "_last",
-                last_value_record.aggregator.checkpoint,
-            )
-        ]
-
-    def _convert_from_value_observer(
-        self, value_observer_record: ExportRecord
-    ) -> Sequence[TimeSeries]:
-        timeseries = []
-        for agg_type in ["min", "max", "sum", "count", "last"]:
-            timeseries.append(
-                self._create_timeseries(
-                    value_observer_record,
-                    value_observer_record.instrument.name + "_" + agg_type,
-                    getattr(
-                        value_observer_record.aggregator.checkpoint, agg_type
-                    ),
-                )
-            )
-        return timeseries
-
-    # TODO: Implement convert from quantile once supported by SDK for Prometheus Summaries
-    def _convert_from_quantile(
-        self, summary_record: ExportRecord
-    ) -> Sequence[TimeSeries]:
-        raise NotImplementedError()

    # pylint: disable=no-member,no-self-use
    def _create_timeseries(
        self,
-        export_record: ExportRecord,
+        export_record,
        name: str,
        value: float,
        extra_label: (str, str) = None,
@@ -344,7 +375,7 @@ def _build_headers(self) -> Dict:

    def _send_message(
        self, message: bytes, headers: Dict
-    ) -> MetricsExportResult:
+    ) -> MetricExportResult:
        auth = None
        if self.basic_auth:
            auth = (self.basic_auth["username"], self.basic_auth["password"])
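For context, a sketch of how this exporter would typically be wired into the SDK pipeline, assuming the standard PeriodicExportingMetricReader; the endpoint is hypothetical, matching the style used in the tests below.

from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
from opentelemetry.exporter.prometheus_remote_write import (
    PrometheusRemoteWriteMetricsExporter,
)

# Hypothetical remote write endpoint
exporter = PrometheusRemoteWriteMetricsExporter("http://victoria:8428/api/v1/write")
reader = PeriodicExportingMetricReader(exporter, export_interval_millis=15_000)
provider = MeterProvider(metric_readers=[reader])
counter = provider.get_meter("demo").create_counter("requests")
counter.add(1, {"path": "/"})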
@@ -0,0 +1,66 @@


import random
import pytest

import opentelemetry.test.metrictestutil as metric_util  # _generate_gauge, _generate_sum

from opentelemetry.sdk.metrics.export import (
    AggregationTemporality,
    Histogram,
    HistogramDataPoint,
    Sum,
    Gauge,
    MetricExportResult,
    MetricsData,
    ResourceMetrics,
    ScopeMetrics,
    Metric,
)

from opentelemetry.exporter.prometheus_remote_write import (
    PrometheusRemoteWriteMetricsExporter,
)


@pytest.fixture
def prom_rw():
    return PrometheusRemoteWriteMetricsExporter(
        "http://victoria:8428/api/v1/write"
    )



@pytest.fixture
def generate_metrics_data(data):
    pass



@pytest.fixture
def metric_histogram():
    dp = HistogramDataPoint(
        attributes={"foo": "bar", "baz": 42},
        start_time_unix_nano=1641946016139533244,
        time_unix_nano=1641946016139533244,
        count=random.randint(1, 10),
        sum=random.randint(42, 420),
        bucket_counts=[1, 4],
        explicit_bounds=[10.0, 20.0],
        min=8,
        max=18,
    )
    data = Histogram(
        [dp],
        AggregationTemporality.CUMULATIVE,
    )
    return Metric(
        "test_histogram",
        "foo",
        "tu",
        data=data,
    )
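Histogram parsing is still NotImplemented in this commit, but for orientation, a hedged sketch of the conventional Prometheus representation this fixture's values would map to: cumulative buckets labeled by upper bound le, plus _count and _sum series.

bucket_counts = [1, 4]            # per-bucket counts, from the fixture above
explicit_bounds = [10.0, 20.0]    # bucket upper bounds
cumulative, buckets = 0, []
for count, bound in zip(bucket_counts, explicit_bounds):
    cumulative += count
    buckets.append((bound, cumulative))
assert buckets == [(10.0, 1), (20.0, 5)]
# An implicit le="+Inf" bucket carries the total, and test_histogram_count /
# test_histogram_sum would complete the family.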

@pytest.fixture
def metric(request):
    if request.param == "gauge":
        return metric_util._generate_gauge("test_gauge", random.randint(0, 100))
    elif request.param == "sum":
        return metric_util._generate_sum("test_sum", random.randint(0, 9_999_999_999))

@@ -23,17 +23,68 @@
    TimeSeries,
)
from opentelemetry.sdk.metrics import Counter
-from opentelemetry.sdk.metrics.export import ExportRecord, MetricsExportResult
-from opentelemetry.sdk.metrics.export.aggregate import (
-    HistogramAggregator,
-    LastValueAggregator,
-    MinMaxSumCountAggregator,
-    SumAggregator,
-    ValueObserverAggregator,
-)
+#from opentelemetry.sdk.metrics.export import ExportRecord, MetricExportResult
+#from opentelemetry.sdk.metrics.export.aggregate import (
+#    HistogramAggregator,
+#    LastValueAggregator,
+#    MinMaxSumCountAggregator,
+#    SumAggregator,
+#    ValueObserverAggregator,
+#)

from opentelemetry.sdk.metrics.export import (
    NumberDataPoint,
)
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util import get_dict_as_key

import pytest

def test_parse_data_point(prom_rw):
    attrs = {"Foo": "Bar", "Baz": 42}
    timestamp = 1641946016139533244
    value = 242.42
    dp = NumberDataPoint(
        attrs,
        0,
        timestamp,
        value,
    )
    labels, sample = prom_rw._parse_data_point(dp)
    assert labels == (("Foo", "Bar"), ("Baz", 42))
    assert sample == (value, timestamp // 1_000_000)

@pytest.mark.parametrize("metric", [
    "gauge",
    "sum",
], indirect=["metric"])
def test_parse_metric(metric, prom_rw):
    # We have 1 data point & 5 labels total
    attributes = {
        "service": "foo",
        "id": 42,
    }

    series = prom_rw._parse_metric(metric, tuple(attributes.items()))
    assert len(series) == 1

    # Build out the expected attributes and check they all made it as labels
    proto_out = series[0]
    number_data_point = metric.data.data_points[0]
    attributes.update(number_data_point.attributes)
    attributes["__name__"] = metric.name + f"_{metric.unit}"

    for label in proto_out.labels:
        assert label.value == str(attributes[label.name])

    # Ensure we have one sample with the correct time & value
    assert len(proto_out.samples) == 1
    sample = proto_out.samples[0]
    assert sample.timestamp == (number_data_point.time_unix_nano // 1_000_000)
    assert sample.value == number_data_point.value



class TestValidation(unittest.TestCase):
    # Test cases to ensure exporter parameter validation works as intended
