Merge branch 'main' into instrumentation-sqlclient-drop-db.statement_type
Kielek committed Feb 5, 2024
2 parents 2414ed1 + ef942a1 commit 354e9b8
Showing 14 changed files with 153 additions and 67 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/Component.BuildTest.yml
@@ -68,13 +68,15 @@ jobs:
run: dotnet-coverage merge -r -f cobertura -o ./TestResults/Cobertura.xml ./TestResults/*.coverage

- name: Upload code coverage ${{ inputs.code-cov-prefix }}-${{ inputs.code-cov-name }}
uses: codecov/codecov-action@v3.1.5
uses: codecov/codecov-action@v4
continue-on-error: true # Note: Don't fail for upload failures
env:
OS: ${{ matrix.os }}
TFM: ${{ matrix.version }}
token: ${{ secrets.CODECOV_TOKEN }}
with:
file: TestResults/Cobertura.xml
env_vars: OS,TFM
flags: ${{ inputs.code-cov-prefix }}-${{ inputs.code-cov-name }}
name: Code Coverage for ${{ inputs.code-cov-prefix }}-${{ inputs.code-cov-name }} on [${{ matrix.os }}.${{ matrix.version }}]
codecov_yml_path: .github/codecov.yml
2 changes: 1 addition & 1 deletion .github/workflows/codeql-analysis.yml
@@ -25,7 +25,7 @@ jobs:

steps:
- name: configure Pagefile
uses: al-cheb/configure-pagefile-action@v1.3
uses: al-cheb/configure-pagefile-action@v1.4
with:
minimum-size: 8GB
maximum-size: 32GB
2 changes: 1 addition & 1 deletion .github/workflows/markdownlint.yml
@@ -14,7 +14,7 @@ jobs:
uses: actions/checkout@v4

- name: run markdownlint
uses: DavidAnson/markdownlint-cli2-action@v14.0.0
uses: DavidAnson/markdownlint-cli2-action@v15.0.0
with:
globs: |
**/*.md
@@ -2,11 +2,17 @@

## Unreleased

* Export OpenMetrics format from Prometheus exporters ([#5107](https://github.com/open-telemetry/opentelemetry-dotnet/pull/5107))
* Export OpenMetrics format from Prometheus exporters
([#5107](https://github.com/open-telemetry/opentelemetry-dotnet/pull/5107))

* For requests with OpenMetrics format, scope info is automatically added
([#5086](https://github.com/open-telemetry/opentelemetry-dotnet/pull/5086)
[#5182](https://github.com/open-telemetry/opentelemetry-dotnet/pull/5182))

* **Breaking change** Updated the `PrometheusHttpListener` to throw an exception
if it can't be started.
([#5304](https://github.com/open-telemetry/opentelemetry-dotnet/pull/5304))

## 1.7.0-rc.1

Released 2023-Nov-29
@@ -59,6 +59,8 @@ public void Start(CancellationToken token = default)
return;
}

this.httpListener.Start();

// link the passed in token if not null
this.tokenSource = token == default ?
new CancellationTokenSource() :
@@ -91,7 +93,7 @@ public void Dispose()
{
this.Stop();

if (this.httpListener != null && this.httpListener.IsListening)
if (this.httpListener.IsListening)
{
this.httpListener.Close();
}
@@ -111,8 +113,6 @@ private static bool AcceptsOpenMetrics(HttpListenerRequest request)

private void WorkerProc()
{
this.httpListener.Start();

try
{
using var scope = SuppressInstrumentationScope.Begin();
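Moving `this.httpListener.Start()` from the worker thread into `Start()` means a failure to bind the configured prefixes now surfaces to the caller as an exception instead of being swallowed on the background thread. A minimal sketch of how a host might react, assuming direct access to `PrometheusExporter` and `PrometheusHttpListener` as in the tests further down (applications normally configure the listener through `AddPrometheusHttpListener`); the address and error handling are illustrative:

```csharp
using System;
using System.Net;
using OpenTelemetry.Exporter;

var exporter = new PrometheusExporter(new());
var listener = new PrometheusHttpListener(
    exporter,
    new() { UriPrefixes = new[] { "http://localhost:9464/" } });

try
{
    // With this change, a bind failure (port already in use, missing URL ACL, ...)
    // throws here rather than silently killing the worker thread.
    listener.Start();
}
catch (HttpListenerException ex)
{
    Console.Error.WriteLine($"Prometheus scrape endpoint could not be started: {ex.Message}");
    listener.Dispose();
    exporter.Dispose();
    throw;
}
```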
23 changes: 10 additions & 13 deletions src/OpenTelemetry/Metrics/AggregatorStore.cs
@@ -12,7 +12,7 @@ namespace OpenTelemetry.Metrics;
internal sealed class AggregatorStore
{
internal readonly bool OutputDelta;
internal readonly bool ShouldReclaimUnusedMetricPoints;
internal readonly bool OutputDeltaWithUnusedMetricPointReclaimEnabled;
internal long DroppedMeasurements = 0;

private static readonly string MetricPointCapHitFixMessage = "Consider opting in for the experimental SDK feature to emit all the throttled metrics under the overflow attribute by setting env variable OTEL_DOTNET_EXPERIMENTAL_METRICS_EMIT_OVERFLOW_ATTRIBUTE = true. You could also modify instrumentation to reduce the number of unique key/value pair combinations. Or use Views to drop unwanted tags. Or use MeterProviderBuilder.SetMaxMetricPointsPerMetricStream to set higher limit.";
@@ -101,9 +101,9 @@ internal AggregatorStore(
reservedMetricPointsCount++;
}

this.ShouldReclaimUnusedMetricPoints = shouldReclaimUnusedMetricPoints;
this.OutputDeltaWithUnusedMetricPointReclaimEnabled = shouldReclaimUnusedMetricPoints && this.OutputDelta;

if (this.OutputDelta && shouldReclaimUnusedMetricPoints)
if (this.OutputDeltaWithUnusedMetricPointReclaimEnabled)
{
this.availableMetricPoints = new Queue<int>(maxMetricPoints - reservedMetricPointsCount);

@@ -158,17 +158,14 @@ internal void Update(double value, ReadOnlySpan<KeyValuePair<string, object?>> t
internal int Snapshot()
{
this.batchSize = 0;
if (this.OutputDelta)
if (this.OutputDeltaWithUnusedMetricPointReclaimEnabled)
{
if (this.ShouldReclaimUnusedMetricPoints)
{
this.SnapshotDeltaWithMetricPointReclaim();
}
else
{
var indexSnapshot = Math.Min(this.metricPointIndex, this.maxMetricPoints - 1);
this.SnapshotDelta(indexSnapshot);
}
this.SnapshotDeltaWithMetricPointReclaim();
}
else if (this.OutputDelta)
{
var indexSnapshot = Math.Min(this.metricPointIndex, this.maxMetricPoints - 1);
this.SnapshotDelta(indexSnapshot);
}
else
{
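The rename above is more than cosmetic: `ShouldReclaimUnusedMetricPoints` is replaced by a flag that already folds in `OutputDelta`, so the hot paths test a single precomputed boolean. A minimal standalone sketch of the pattern; the type and member names mirror the diff, but the branch bodies are placeholders rather than the SDK's actual snapshot logic:

```csharp
internal sealed class AggregatorStoreSketch
{
    internal readonly bool OutputDelta;
    internal readonly bool OutputDeltaWithUnusedMetricPointReclaimEnabled;

    internal AggregatorStoreSketch(bool outputDelta, bool shouldReclaimUnusedMetricPoints)
    {
        this.OutputDelta = outputDelta;

        // Evaluate "delta AND reclaim" once, at construction time, so Snapshot()
        // and Update() branch on a single readonly field afterwards.
        this.OutputDeltaWithUnusedMetricPointReclaimEnabled =
            shouldReclaimUnusedMetricPoints && outputDelta;
    }

    internal void Snapshot()
    {
        if (this.OutputDeltaWithUnusedMetricPointReclaimEnabled)
        {
            // Delta aggregation with metric-point reclaim enabled.
        }
        else if (this.OutputDelta)
        {
            // Plain delta aggregation.
        }
        else
        {
            // Cumulative aggregation.
        }
    }
}
```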
14 changes: 5 additions & 9 deletions src/OpenTelemetry/Metrics/MetricPoint.cs
@@ -50,11 +50,7 @@ internal MetricPoint(
{
Debug.Assert(aggregatorStore != null, "AggregatorStore was null.");
Debug.Assert(histogramExplicitBounds != null, "Histogram explicit Bounds was null.");

if (aggregatorStore!.OutputDelta && aggregatorStore.ShouldReclaimUnusedMetricPoints)
{
Debug.Assert(lookupData != null, "LookupData was null.");
}
Debug.Assert(!aggregatorStore!.OutputDeltaWithUnusedMetricPointReclaimEnabled || lookupData != null, "LookupData was null.");

this.aggType = aggType;
this.Tags = new ReadOnlyTagCollection(tagKeysAndValues);
@@ -445,7 +441,7 @@ internal void Update(long number)
// by ignoring Zero points
this.MetricPointStatus = MetricPointStatus.CollectPending;

if (this.aggregatorStore.OutputDelta)
if (this.aggregatorStore.OutputDeltaWithUnusedMetricPointReclaimEnabled)
{
Interlocked.Decrement(ref this.ReferenceCount);
}
@@ -570,7 +566,7 @@ internal void UpdateWithExemplar(long number, ReadOnlySpan<KeyValuePair<string,
// by ignoring Zero points
this.MetricPointStatus = MetricPointStatus.CollectPending;

if (this.aggregatorStore.OutputDelta)
if (this.aggregatorStore.OutputDeltaWithUnusedMetricPointReclaimEnabled)
{
Interlocked.Decrement(ref this.ReferenceCount);
}
@@ -666,7 +662,7 @@ internal void Update(double number)
// by ignoring Zero points
this.MetricPointStatus = MetricPointStatus.CollectPending;

if (this.aggregatorStore.OutputDelta)
if (this.aggregatorStore.OutputDeltaWithUnusedMetricPointReclaimEnabled)
{
Interlocked.Decrement(ref this.ReferenceCount);
}
@@ -797,7 +793,7 @@ internal void UpdateWithExemplar(double number, ReadOnlySpan<KeyValuePair<string
// by ignoring Zero points
this.MetricPointStatus = MetricPointStatus.CollectPending;

if (this.aggregatorStore.OutputDelta)
if (this.aggregatorStore.OutputDeltaWithUnusedMetricPointReclaimEnabled)
{
Interlocked.Decrement(ref this.ReferenceCount);
}
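All four `Update`/`UpdateWithExemplar` overloads above make the same substitution: the `Interlocked.Decrement` on `ReferenceCount` is now gated on the combined flag, so the atomic operation is skipped entirely when reclaim is not in play. A small sketch of that gating; this is an illustration, not `MetricPoint`'s real layout, and the increment/lookup side is omitted because it is not part of this diff:

```csharp
using System.Threading;

internal struct MetricPointSketch
{
    internal int ReferenceCount;

    private readonly bool outputDeltaWithUnusedMetricPointReclaimEnabled;

    internal MetricPointSketch(bool reclaimEnabled)
    {
        this.ReferenceCount = 0;
        this.outputDeltaWithUnusedMetricPointReclaimEnabled = reclaimEnabled;
    }

    internal void Update(long number)
    {
        // ... record the measurement and mark the point as collect-pending ...

        // Only pay for the interlocked decrement when unused metric points can
        // actually be reclaimed; otherwise the reference count is never read.
        if (this.outputDeltaWithUnusedMetricPointReclaimEnabled)
        {
            Interlocked.Decrement(ref this.ReferenceCount);
        }
    }
}
```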
@@ -23,7 +23,7 @@ public void TestAddPrometheusHttpListener_NamedOptions()
services.Configure<PrometheusHttpListenerOptions>("Exporter2", o => namedExporterOptionsConfigureOptionsInvocations++);
})
.AddPrometheusHttpListener()
.AddPrometheusHttpListener("Exporter2", o => { })
.AddPrometheusHttpListener("Exporter2", o => o.ScrapeEndpointPath = "/metrics2")
.Build();

Assert.Equal(1, defaultExporterOptionsConfigureOptionsInvocations);
@@ -29,19 +29,15 @@ public class PrometheusHttpListenerTests
[InlineData("http://example.com")]
public void UriPrefixesPositiveTest(params string[] uriPrefixes)
{
using MeterProvider meterProvider = Sdk.CreateMeterProviderBuilder()
.AddPrometheusHttpListener(options => options.UriPrefixes = uriPrefixes)
.Build();
TestPrometheusHttpListenerUriPrefixOptions(uriPrefixes);
}

[Fact]
public void UriPrefixesNull()
{
Assert.Throws<ArgumentNullException>(() =>
{
using MeterProvider meterProvider = Sdk.CreateMeterProviderBuilder()
.AddPrometheusHttpListener(options => options.UriPrefixes = null)
.Build();
TestPrometheusHttpListenerUriPrefixOptions(null);
});
}

@@ -50,20 +46,16 @@ public void UriPrefixesEmptyList()
{
Assert.Throws<ArgumentException>(() =>
{
using MeterProvider meterProvider = Sdk.CreateMeterProviderBuilder()
.AddPrometheusHttpListener(options => options.UriPrefixes = new string[] { })
.Build();
TestPrometheusHttpListenerUriPrefixOptions(new string[] { });
});
}

[Fact]
public void UriPrefixesInvalid()
{
Assert.Throws<InvalidOperationException>(() =>
Assert.Throws<ArgumentException>(() =>
{
using MeterProvider meterProvider = Sdk.CreateMeterProviderBuilder()
.AddPrometheusHttpListener(options => options.UriPrefixes = new string[] { "ftp://example.com" })
.Build();
TestPrometheusHttpListenerUriPrefixOptions(new string[] { "ftp://example.com" });
});
}

@@ -91,6 +83,77 @@ public async Task PrometheusExporterHttpServerIntegration_UseOpenMetricsVersionH
await this.RunPrometheusExporterHttpServerIntegrationTest(acceptHeader: "application/openmetrics-text; version=1.0.0");
}

[Fact]
public void PrometheusHttpListenerThrowsOnStart()
{
Random random = new Random();
int retryAttempts = 5;
int port = 0;
string address = null;

PrometheusExporter exporter = null;
PrometheusHttpListener listener = null;

// Step 1: Start a listener on a random port.
while (retryAttempts-- != 0)
{
port = random.Next(2000, 5000);
address = $"http://localhost:{port}/";

try
{
exporter = new PrometheusExporter(new());
listener = new PrometheusHttpListener(
exporter,
new()
{
UriPrefixes = new string[] { address },
});

listener.Start();

break;
}
catch
{
// ignored
}
}

if (retryAttempts < 0)
{
throw new InvalidOperationException("PrometheusHttpListener could not be started");
}

// Step 2: Verify that starting a second listener on the same port throws an exception.
Assert.Throws<HttpListenerException>(() =>
{
using var exporter = new PrometheusExporter(new());
using var listener = new PrometheusHttpListener(
exporter,
new()
{
UriPrefixes = new string[] { address },
});

listener.Start();
});

exporter?.Dispose();
listener?.Dispose();
}

private static void TestPrometheusHttpListenerUriPrefixOptions(string[] uriPrefixes)
{
using var exporter = new PrometheusExporter(new());
using var listener = new PrometheusHttpListener(
exporter,
new()
{
UriPrefixes = uriPrefixes,
});
}

private async Task RunPrometheusExporterHttpServerIntegrationTest(bool skipMetrics = false, string acceptHeader = "application/openmetrics-text")
{
var requestOpenMetrics = acceptHeader.StartsWith("application/openmetrics-text");
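For context, the removed inline builders and the new `TestPrometheusHttpListenerUriPrefixOptions` helper exercise the same option surface. A minimal sketch of the typical application-side configuration, using the `AddPrometheusHttpListener` extension seen above; the meter name and address are placeholders:

```csharp
using OpenTelemetry;
using OpenTelemetry.Metrics;

// Expose a Prometheus scrape endpoint backed by HttpListener. Per the tests
// above, a null, empty, or non-http UriPrefixes value is rejected when the
// listener is constructed.
using var meterProvider = Sdk.CreateMeterProviderBuilder()
    .AddMeter("MyCompany.MyApp")   // placeholder meter name
    .AddPrometheusHttpListener(options =>
        options.UriPrefixes = new[] { "http://localhost:9464/" })
    .Build();
```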
@@ -8,9 +8,9 @@
| :green_heart: | ConventionalRouting | [Not Found (404)](#conventionalrouting-not-found-404) |
| :green_heart: | ConventionalRouting | [Route template with parameter constraint](#conventionalrouting-route-template-with-parameter-constraint) |
| :green_heart: | ConventionalRouting | [Path that does not match parameter constraint](#conventionalrouting-path-that-does-not-match-parameter-constraint) |
| :broken_heart: | ConventionalRouting | [Area using area:exists, default controller/action](#conventionalrouting-area-using-areaexists-default-controlleraction) |
| :broken_heart: | ConventionalRouting | [Area using area:exists, non-default action](#conventionalrouting-area-using-areaexists-non-default-action) |
| :broken_heart: | ConventionalRouting | [Area w/o area:exists, default controller/action](#conventionalrouting-area-wo-areaexists-default-controlleraction) |
| :broken_heart: | ConventionalRouting | [Area using `area:exists`, default controller/action](#conventionalrouting-area-using-areaexists-default-controlleraction) |
| :broken_heart: | ConventionalRouting | [Area using `area:exists`, non-default action](#conventionalrouting-area-using-areaexists-non-default-action) |
| :broken_heart: | ConventionalRouting | [Area w/o `area:exists`, default controller/action](#conventionalrouting-area-wo-areaexists-default-controlleraction) |
| :green_heart: | AttributeRouting | [Default action](#attributerouting-default-action) |
| :green_heart: | AttributeRouting | [Action without parameter](#attributerouting-action-without-parameter) |
| :green_heart: | AttributeRouting | [Action with parameter](#attributerouting-action-with-parameter) |
@@ -194,7 +194,7 @@
}
```

## ConventionalRouting: Area using area:exists, default controller/action
## ConventionalRouting: Area using `area:exists`, default controller/action

```json
{
@@ -225,7 +225,7 @@
}
```

## ConventionalRouting: Area using area:exists, non-default action
## ConventionalRouting: Area using `area:exists`, non-default action

```json
{
@@ -256,7 +256,7 @@
}
```

## ConventionalRouting: Area w/o area:exists, default controller/action
## ConventionalRouting: Area w/o `area:exists`, default controller/action

```json
{
@@ -8,9 +8,9 @@
| :green_heart: | ConventionalRouting | [Not Found (404)](#conventionalrouting-not-found-404) |
| :green_heart: | ConventionalRouting | [Route template with parameter constraint](#conventionalrouting-route-template-with-parameter-constraint) |
| :green_heart: | ConventionalRouting | [Path that does not match parameter constraint](#conventionalrouting-path-that-does-not-match-parameter-constraint) |
| :broken_heart: | ConventionalRouting | [Area using area:exists, default controller/action](#conventionalrouting-area-using-areaexists-default-controlleraction) |
| :broken_heart: | ConventionalRouting | [Area using area:exists, non-default action](#conventionalrouting-area-using-areaexists-non-default-action) |
| :broken_heart: | ConventionalRouting | [Area w/o area:exists, default controller/action](#conventionalrouting-area-wo-areaexists-default-controlleraction) |
| :broken_heart: | ConventionalRouting | [Area using `area:exists`, default controller/action](#conventionalrouting-area-using-areaexists-default-controlleraction) |
| :broken_heart: | ConventionalRouting | [Area using `area:exists`, non-default action](#conventionalrouting-area-using-areaexists-non-default-action) |
| :broken_heart: | ConventionalRouting | [Area w/o `area:exists`, default controller/action](#conventionalrouting-area-wo-areaexists-default-controlleraction) |
| :green_heart: | AttributeRouting | [Default action](#attributerouting-default-action) |
| :green_heart: | AttributeRouting | [Action without parameter](#attributerouting-action-without-parameter) |
| :green_heart: | AttributeRouting | [Action with parameter](#attributerouting-action-with-parameter) |
@@ -196,7 +196,7 @@
}
```

## ConventionalRouting: Area using area:exists, default controller/action
## ConventionalRouting: Area using `area:exists`, default controller/action

```json
{
@@ -227,7 +227,7 @@
}
```

## ConventionalRouting: Area using area:exists, non-default action
## ConventionalRouting: Area using `area:exists`, non-default action

```json
{
@@ -258,7 +258,7 @@
}
```

## ConventionalRouting: Area w/o area:exists, default controller/action
## ConventionalRouting: Area w/o `area:exists`, default controller/action

```json
{
