From a4caca708245de8dbb2d4e6a6861de33547f9aa5 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 16 Nov 2023 17:52:27 -0500 Subject: [PATCH 01/94] Move addMetricsIfPresent into the metrics builder as a first class method for others to leverage. Signed-off-by: Greg Schohn --- .../migrations/coreutils/MetricsLogBuilder.java | 10 ++++++++-- .../replay/ParsedHttpMessagesAsDicts.java | 15 +++++---------- .../migrations/replay/ReplayEngine.java | 1 - .../replay/netty/BacksideHttpWatcherHandler.java | 1 - 4 files changed, 13 insertions(+), 14 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java index 0e18e6218..3126eb4e5 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java @@ -5,10 +5,11 @@ import org.slf4j.Logger; import org.slf4j.spi.LoggingEventBuilder; +import java.util.Optional; + @Slf4j -public -class MetricsLogBuilder { +public class MetricsLogBuilder { private Logger logger; private LoggingEventBuilder loggingEventBuilder; @@ -16,6 +17,11 @@ public MetricsLogBuilder(Logger logger) { this.logger = logger; } + public static MetricsLogBuilder addMetricIfPresent(MetricsLogBuilder metricBuilder, + MetricsAttributeKey key, Optional value) { + return value.map(v -> metricBuilder.setAttribute(key, v)).orElse(metricBuilder); + } + public MetricsLogBuilder setAttribute(MetricsAttributeKey key, Object value) { loggingEventBuilder.addKeyValue(key.getKeyName(), value); return this; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java index df2037eb6..4307ec6ea 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java @@ -89,11 +89,6 @@ public ParsedHttpMessagesAsDicts(Optional> sourceRequestOp1, this.targetResponseOp = targetResponseOp4; } - private static MetricsLogBuilder addMetricIfPresent(MetricsLogBuilder metricBuilder, - MetricsAttributeKey key, Optional value) { - return value.map(v -> metricBuilder.setAttribute(key, v)).orElse(metricBuilder); - } - public MetricsLogBuilder buildStatusCodeMetrics(MetricsLogger logger, UniqueSourceRequestKey requestKey) { var builder = logger.atSuccess(MetricsEvent.STATUS_CODE_COMPARISON); return buildStatusCodeMetrics(builder, requestKey); @@ -111,14 +106,14 @@ public static MetricsLogBuilder buildStatusCodeMetrics(MetricsLogBuilder builder var targetStatus = targetResponseOp.map(r -> r.get(STATUS_CODE_KEY)); builder = builder.setAttribute(MetricsAttributeKey.REQUEST_ID, requestKey.getTrafficStreamKey().getConnectionId() + "." 
+ requestKey.getSourceRequestIndex()); - builder = addMetricIfPresent(builder, MetricsAttributeKey.SOURCE_HTTP_STATUS, sourceStatus); - builder = addMetricIfPresent(builder, MetricsAttributeKey.TARGET_HTTP_STATUS, targetStatus); - builder = addMetricIfPresent(builder, MetricsAttributeKey.HTTP_STATUS_MATCH, + builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.SOURCE_HTTP_STATUS, sourceStatus); + builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.TARGET_HTTP_STATUS, targetStatus); + builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.HTTP_STATUS_MATCH, sourceStatus.flatMap(ss -> targetStatus.map(ts -> ss.equals(ts))) .filter(x -> x).map(b -> (Object) 1).or(() -> Optional.of(Integer.valueOf(0)))); - builder = addMetricIfPresent(builder, MetricsAttributeKey.HTTP_METHOD, + builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.HTTP_METHOD, sourceResponseOp.map(r -> r.get("Method"))); - builder = addMetricIfPresent(builder, MetricsAttributeKey.HTTP_ENDPOINT, + builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.HTTP_ENDPOINT, sourceResponseOp.map(r -> r.get("Request-URI"))); return builder; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index dd823156a..542243d56 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.replay; import io.netty.buffer.ByteBuf; -import io.netty.util.concurrent.Future; import io.netty.util.concurrent.ScheduledFuture; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideHttpWatcherHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideHttpWatcherHandler.java index a552fe876..7cbb36864 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideHttpWatcherHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideHttpWatcherHandler.java @@ -3,7 +3,6 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.handler.codec.http.FullHttpResponse; -import lombok.extern.log4j.Log4j2; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; From c026588ef5a9cb1bca3aaf302481bd41444a7041 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 17 Nov 2023 11:02:14 -0500 Subject: [PATCH 02/94] WIP to play with OpenTelemetry metric instruments and tracer spans. Most of this is just playing, but making the StreamManager implement AutoCloseable gives a place to end spans to show how long a serializer/connection factory was relevant for. 
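As a minimal sketch of that lifetime-span pattern (illustrative only: the class, scope, and metric names below are placeholders, not the exact code in this patch), an AutoCloseable can start a span when it is constructed and end it in close(), recording the elapsed time as a histogram:

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.context.Context;

    import java.time.Duration;
    import java.time.Instant;

    // Hypothetical example; "demo" and "connection_lifetime" are placeholder names.
    class SpanScopedResource implements AutoCloseable {
        private final Span span;
        private final Instant startTime;

        SpanScopedResource(String connectionId) {
            // Open a span when the resource (e.g. a serializer/connection factory) is created...
            span = GlobalOpenTelemetry.get().getTracer("demo")
                    .spanBuilder("connection").startSpan();
            span.setAttribute("connectionId", connectionId);
            startTime = Instant.now();
        }

        @Override
        public void close() {
            // ...and end it in close(), so the span's duration reflects the resource's lifetime.
            try (var scope = Context.current().with(span).makeCurrent()) {
                GlobalOpenTelemetry.get().getMeter("demo")
                        .histogramBuilder("connection_lifetime").build()
                        .record(Duration.between(startTime, Instant.now()).toMillis());
            } finally {
                span.end();
            }
        }
    }

The try-with-resources scope only makes the span current while the histogram is recorded; the span itself covers the whole lifetime of the object, from construction to close().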
Signed-off-by: Greg Schohn --- .../captureKafkaOffloader/build.gradle | 14 ++- .../kafkaoffloader/KafkaCaptureFactory.java | 42 +++++++- .../FileConnectionCaptureFactory.java | 3 + ...eamChannelConnectionCaptureSerializer.java | 32 +++--- .../StreamLifecycleManager.java | 4 +- ...hannelConnectionCaptureSerializerTest.java | 3 + .../InMemoryConnectionCaptureFactory.java | 3 + TrafficCapture/coreUtilities/build.gradle | 8 +- .../migrations/coreutils/MetricsLogger.java | 97 ++++++++++++++++--- .../src/main/docker/docker-compose.yml | 5 + .../src/main/docker/otelcol/otel-config.yml | 4 +- ...ReliableLoggingHttpRequestHandlerTest.java | 3 + .../proxyserver/CaptureProxy.java | 8 +- .../src/main/resources/log4j2.properties | 7 +- 14 files changed, 184 insertions(+), 49 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index 11f4e3a06..d7bb474e9 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -12,11 +12,15 @@ dependencies { api 'io.netty:netty-buffer:4.1.100.Final' implementation project(':captureOffloader') implementation project(':coreUtilities') - implementation 'org.projectlombok:lombok:1.18.26' - implementation 'com.google.protobuf:protobuf-java:3.22.2' - implementation 'org.apache.kafka:kafka-clients:3.6.0' - implementation 'software.amazon.msk:aws-msk-iam-auth:1.1.9' - implementation 'org.slf4j:slf4j-api:2.0.7' + + implementation group: 'com.google.protobuf', name:'protobuf-java', version:'3.22.2' + implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' + implementation group: 'org.projectlombok', name:'lombok', version:'1.18.26' + implementation group: 'org.apache.kafka', name:'kafka-clients', version:'3.6.0' + implementation group: 'software.amazon.msk', name:'aws-msk-iam-auth', version:'1.1.9' + + implementation group: 'org.slf4j', name:'slf4j-api', version:'2.0.7' + testImplementation project(':captureProtobufs') testImplementation 'org.mockito:mockito-core:4.6.1' testImplementation 'org.mockito:mockito-junit-jupiter:4.6.1' diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 756ec9739..acbe92c10 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -1,6 +1,12 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader; import com.google.protobuf.CodedOutputStream; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import lombok.AllArgsConstructor; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; @@ -17,7 +23,10 @@ import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; +import java.io.IOException; import java.nio.ByteBuffer; +import java.time.Duration; +import java.time.Instant; import java.util.Arrays; import 
java.util.concurrent.CompletableFuture; @@ -31,6 +40,7 @@ public class KafkaCaptureFactory implements IConnectionCaptureFactory producer, int @Override public IChannelConnectionCaptureSerializer createOffloader(String connectionId) { - return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager(connectionId)); + var tracer = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME); + Span connectionSpan = tracer.spanBuilder("connection").startSpan(); + + try (var namedOnlyForAutoClose = Context.current().with(connectionSpan).makeCurrent()) { + var meter = GlobalOpenTelemetry.get().getMeter(TELEMETRY_SCOPE_NAME); + meter.counterBuilder("connection_created").build().add(1); + } + + return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, + new StreamManager(connectionSpan, connectionId)); } @AllArgsConstructor @@ -65,9 +84,28 @@ static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { } } - @AllArgsConstructor class StreamManager extends OrderedStreamLifecyleManager { + Span telemetrySpan; String connectionId; + Instant startTime; + + public StreamManager(Span telemetrySpan, String connectionId) { + this.telemetrySpan = telemetrySpan; + this.connectionId = connectionId; + this.startTime = Instant.now(); + } + + @Override + public void close() throws IOException { + try (var namedOnlyForAutoClose = Context.current().with(telemetrySpan).makeCurrent()) { + var histogram = GlobalOpenTelemetry.get().getMeter(TELEMETRY_SCOPE_NAME) + .histogramBuilder("connection_lifetime").build(); + telemetrySpan.setAttribute("connectionId", connectionId); + histogram.record((double) Duration.between(startTime, Instant.now()).toMillis(), Attributes.empty(), + Context.current().with(telemetrySpan)); + telemetrySpan.end(); + } + } @Override public CodedOutputStreamWrapper createStream() { diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java index 26cee767d..751d39f63 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java @@ -51,6 +51,9 @@ public FileConnectionCaptureFactory(String nodeId, String path, int bufferSize) @AllArgsConstructor class StreamManager extends OrderedStreamLifecyleManager { String connectionId; + @Override + public void close() {} + @Override public CodedOutputStreamAndByteBufferWrapper createStream() { return new CodedOutputStreamAndByteBufferWrapper(bufferSize); diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java index 14c501d37..d59bcc512 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java @@ -188,20 +188,23 @@ public CompletableFuture flushCommitAndResetStream(boolean isFinal) throws IO if (streamHasBeenClosed || (currentCodedOutputStreamHolderOrNull == null && 
!isFinal)) { return CompletableFuture.completedFuture(null); } - CodedOutputStream currentStream = getOrCreateCodedOutputStream(); - var fieldNum = isFinal ? TrafficStream.NUMBEROFTHISLASTCHUNK_FIELD_NUMBER : TrafficStream.NUMBER_FIELD_NUMBER; - // e.g. 3: 1 - currentStream.writeInt32(fieldNum, ++numFlushesSoFar); - log.trace("Flushing the current CodedOutputStream for {}.{}", connectionIdString, numFlushesSoFar); - currentStream.flush(); - assert currentStream == currentCodedOutputStreamHolderOrNull.getOutputStream() : "Expected the stream that " + - "is being finalized to be the same stream contained by currentCodedOutputStreamHolderOrNull"; - var future = streamManager.closeStream(currentCodedOutputStreamHolderOrNull, numFlushesSoFar); - currentCodedOutputStreamHolderOrNull = null; - if (isFinal) { - streamHasBeenClosed = true; + try { + CodedOutputStream currentStream = getOrCreateCodedOutputStream(); + var fieldNum = isFinal ? TrafficStream.NUMBEROFTHISLASTCHUNK_FIELD_NUMBER : TrafficStream.NUMBER_FIELD_NUMBER; + // e.g. 3: 1 + currentStream.writeInt32(fieldNum, ++numFlushesSoFar); + log.trace("Flushing the current CodedOutputStream for {}.{}", connectionIdString, numFlushesSoFar); + currentStream.flush(); + assert currentStream == currentCodedOutputStreamHolderOrNull.getOutputStream() : "Expected the stream that " + + "is being finalized to be the same stream contained by currentCodedOutputStreamHolderOrNull"; + return streamManager.closeStream(currentCodedOutputStreamHolderOrNull, numFlushesSoFar); + } finally { + currentCodedOutputStreamHolderOrNull = null; + if (isFinal) { + streamHasBeenClosed = true; + streamManager.close(); + } } - return future; } @Override @@ -222,7 +225,8 @@ public void addDisconnectEvent(Instant timestamp) throws IOException { @Override public void addCloseEvent(Instant timestamp) throws IOException { beginSubstreamObservation(timestamp, TrafficObservation.CLOSE_FIELD_NUMBER, 1); - getOrCreateCodedOutputStream().writeMessage(TrafficObservation.CLOSE_FIELD_NUMBER, CloseObservation.getDefaultInstance()); + getOrCreateCodedOutputStream().writeMessage(TrafficObservation.CLOSE_FIELD_NUMBER, + CloseObservation.getDefaultInstance()); } @Override diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java index 18db43cc4..b41af74a5 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java @@ -1,9 +1,11 @@ package org.opensearch.migrations.trafficcapture; +import java.io.IOException; import java.util.concurrent.CompletableFuture; -public interface StreamLifecycleManager { +public interface StreamLifecycleManager extends AutoCloseable { CodedOutputStreamHolder createStream(); CompletableFuture closeStream(CodedOutputStreamHolder outputStreamHolder, int index); + void close() throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java b/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java index 4a25aa4d7..97def992c 100644 --- 
a/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java +++ b/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java @@ -320,6 +320,9 @@ class StreamManager extends OrderedStreamLifecyleManager { int bufferSize; ConcurrentLinkedQueue outputBuffers; + @Override + public void close() {} + @Override public CodedOutputStreamHolder createStream() { return new CodedOutputStreamAndByteBufferWrapper(bufferSize); diff --git a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java index 8af6b3a89..b63ef52af 100644 --- a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java @@ -36,6 +36,9 @@ public InMemoryConnectionCaptureFactory(String nodeId, int bufferSize, Runnable @AllArgsConstructor class StreamManager extends OrderedStreamLifecyleManager { + @Override + public void close() {} + @Override public CodedOutputStreamHolder createStream() { return new CodedOutputStreamAndByteBufferWrapper(bufferSize); diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index 1c4404477..da76d0f15 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -56,9 +56,11 @@ dependencies { implementation("org.apache.logging.log4j:log4j-slf4j2-impl:2.20.0") // OpenTelemetry core - implementation("io.opentelemetry:opentelemetry-sdk:1.30.0") - implementation("io.opentelemetry:opentelemetry-exporter-otlp:1.30.0") - implementation("io.opentelemetry:opentelemetry-semconv:1.30.1-alpha") + implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' + implementation group: 'io.opentelemetry', name:'opentelemetry-exporter-otlp', version: '1.30.0' + implementation group: 'io.opentelemetry', name:'opentelemetry-sdk', version: '1.30.0' + implementation group: 'io.opentelemetry.instrumentation', name:'opentelemetry-log4j-appender-2.17', version: '1.30.0-alpha' + implementation group: 'io.opentelemetry', name:'opentelemetry-semconv', version: '1.30.0-alpha' // OpenTelemetry log4j appender implementation("io.opentelemetry.instrumentation:opentelemetry-log4j-appender-2.17:1.30.0-alpha") diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java index ec62c88d1..23f73aa29 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java @@ -1,17 +1,30 @@ package org.opensearch.migrations.coreutils; + import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; -import 
io.opentelemetry.instrumentation.log4j.appender.v2_17.OpenTelemetryAppender; +import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; import org.slf4j.Logger; import lombok.extern.slf4j.Slf4j; import org.slf4j.LoggerFactory; +import java.time.Duration; +import java.util.concurrent.TimeUnit; + @Slf4j public class MetricsLogger { @@ -29,14 +42,15 @@ public MetricsLogger(String source) { } public static void initializeOpenTelemetry(String serviceName, String collectorEndpoint) { - OpenTelemetrySdk sdk = + var serviceResource = Resource.getDefault().toBuilder() + .put(ResourceAttributes.SERVICE_NAME, serviceName) + .build(); + + OpenTelemetrySdk openTelemetrySdk = OpenTelemetrySdk.builder() .setLoggerProvider( SdkLoggerProvider.builder() - .setResource( - Resource.getDefault().toBuilder() - .put(ResourceAttributes.SERVICE_NAME, serviceName) - .build()) + .setResource(serviceResource) .addLogRecordProcessor( BatchLogRecordProcessor.builder( OtlpGrpcLogRecordExporter.builder() @@ -44,14 +58,68 @@ public static void initializeOpenTelemetry(String serviceName, String collectorE .build()) .build()) .build()) - .build(); - GlobalOpenTelemetry.set(sdk); + .setTracerProvider( + SdkTracerProvider.builder() + .setResource(serviceResource) + .addSpanProcessor( + BatchSpanProcessor.builder( + OtlpGrpcSpanExporter.builder() + .setEndpoint(collectorEndpoint) + .setTimeout(2, TimeUnit.SECONDS) + .build()) + .setScheduleDelay(100, TimeUnit.MILLISECONDS) + .build()) + .build()) + .setMeterProvider( + SdkMeterProvider.builder() + .setResource(serviceResource) + .registerMetricReader( + PeriodicMetricReader.builder( + OtlpGrpcMetricExporter.builder() + .setEndpoint(collectorEndpoint) + .build()) + .setInterval(Duration.ofMillis(1000)) + .build()) + .build()) + .buildAndRegisterGlobal(); // Add hook to close SDK, which flushes logs - Runtime.getRuntime().addShutdownHook(new Thread(sdk::close)); - OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); + Runtime.getRuntime().addShutdownHook(new Thread(openTelemetrySdk::close)); + //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); } + public static class SimpleMeteringClosure { + public final Meter meter; + public final Tracer tracer; + public SimpleMeteringClosure(String scopeName) { + meter = GlobalOpenTelemetry.getMeter(scopeName); + tracer = GlobalOpenTelemetry.getTracer(scopeName); + } + public void meterIncrementEvent(Context ctx, String eventName) { + meterIncrementEvent(ctx, eventName, 1); + } + public void meterIncrementEvent(Context ctx, String eventName, long increment) { + if (ctx == null) { return; } + try (var namedOnlyForAutoClose = ctx.makeCurrent()) { + meter.counterBuilder(eventName).build().add(increment); + } + } + public void meterDeltaEvent(Context ctx, String eventName, long delta) { + if (ctx == null) { return; } + try (var namedOnlyForAutoClose = ctx.makeCurrent()) { + meter.upDownCounterBuilder(eventName).build().add(delta); + } + } + 
public void meterHistogramMillis(Context ctx, String eventName, Duration between) { + meterHistogram(ctx, eventName, (double) between.toMillis()); + } + public void meterHistogram(Context ctx, String eventName, double value) { + if (ctx == null) { return; } + try (var namedOnlyForAutoClose = ctx.makeCurrent()) { + meter.histogramBuilder(eventName).build().record(value); + } + } + } /** * To indicate a successful event (e.g. data received or data sent) that may be a helpful @@ -61,7 +129,7 @@ public static void initializeOpenTelemetry(String serviceName, String collectorE * metricsLogger.atSuccess().addKeyValue("key", "value").setMessage("Task succeeded").log(); */ public MetricsLogBuilder atSuccess(MetricsEvent event) { - return new MetricsLogBuilder(logger).atSuccess(event); + return new MetricsLogBuilder().atSuccess(event); } /** @@ -74,7 +142,7 @@ public MetricsLogBuilder atError(MetricsEvent event, Throwable cause) { if (cause == null) { return atError(event); } - return new MetricsLogBuilder(logger).atError(event) + return new MetricsLogBuilder().atError(event) .setAttribute(MetricsAttributeKey.EXCEPTION_MESSAGE, cause.getMessage()) .setAttribute(MetricsAttributeKey.EXCEPTION_TYPE, cause.getClass().getName()); } @@ -84,10 +152,11 @@ public MetricsLogBuilder atError(MetricsEvent event, Throwable cause) { * there is a failure that isn't indicated by an Exception being thrown. */ public MetricsLogBuilder atError(MetricsEvent event) { - return new MetricsLogBuilder(logger).atError(event); + + return new MetricsLogBuilder().atError(event); } public MetricsLogBuilder atTrace(MetricsEvent event) { - return new MetricsLogBuilder(logger).atTrace(event); + return new MetricsLogBuilder().atTrace(event); } } diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index 14c1b00ff..283f42ded 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -9,6 +9,8 @@ services: ports: - "9200:9200" - "19200:19200" + volumes: + - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs environment: - http.port=19200 # Run processes for elasticsearch and capture proxy, and exit if either one ends @@ -70,6 +72,7 @@ services: - migrations volumes: - sharedReplayerOutput:/shared-replayer-output + - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs environment: - TUPLE_DIR_PATH=/shared-replayer-output/traffic-replayer-default depends_on: @@ -119,10 +122,12 @@ services: - "13133:13133" volumes: - ./otelcol/otel-config.yml:/etc/otel-config.yml + - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs networks: - migrations depends_on: - opensearchanalytics + command: tail -f /dev/null migration-console: image: 'migrations/migration_console:latest' diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml b/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml index f6ccc70e6..e84720698 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml @@ -52,6 +52,8 @@ exporters: insecure_skip_verify: true logging: verbosity: detailed + file: + path: /logs/filename.json debug: service: @@ -63,4 +65,4 @@ service: logs: receivers: [otlp] processors: [attributes] - exporters: [logging, debug, opensearch] \ No newline at end of file + 
exporters: [logging, debug, opensearch, file] \ No newline at end of file diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index 531d206ad..3110d0188 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -39,6 +39,9 @@ static class StreamManager extends OrderedStreamLifecyleManager { AtomicReference byteBufferAtomicReference; AtomicInteger flushCount = new AtomicInteger(); + @Override + public void close() {} + @Override public CodedOutputStreamAndByteBufferWrapper createStream() { return new CodedOutputStreamAndByteBufferWrapper(1024*1024); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 504cdf275..2d400d036 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -17,6 +17,7 @@ import org.apache.kafka.common.config.SaslConfigs; import org.apache.logging.log4j.core.util.NullOutputStream; import org.opensearch.common.settings.Settings; +import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.FileConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; @@ -43,8 +44,6 @@ import java.util.function.Supplier; import java.util.stream.Stream; -import static org.opensearch.migrations.coreutils.MetricsLogger.initializeOpenTelemetry; - @Slf4j public class CaptureProxy { @@ -177,6 +176,9 @@ private static IConnectionCaptureFactory getNullConnectionCaptureFactory System.err.println("No trace log directory specified. 
Logging to /dev/null"); return connectionId -> new StreamChannelConnectionCaptureSerializer<>(null, connectionId, new StreamLifecycleManager<>() { + @Override + public void close() {} + @Override public CodedOutputStreamHolder createStream() { return () -> CodedOutputStream.newInstance(NullOutputStream.getInstance()); @@ -282,7 +284,7 @@ public static void main(String[] args) throws InterruptedException, IOException var backsideUri = convertStringToUri(params.backsideUriString); if (params.otelCollectorEndpoint != null) { - initializeOpenTelemetry("capture-proxy", params.otelCollectorEndpoint); + MetricsLogger.initializeOpenTelemetry("capture-proxy", params.otelCollectorEndpoint); } var sksOp = Optional.ofNullable(params.sslConfigFilePath) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/resources/log4j2.properties b/TrafficCapture/trafficCaptureProxyServer/src/main/resources/log4j2.properties index 6bd32ae07..c35476b38 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/resources/log4j2.properties +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/resources/log4j2.properties @@ -1,4 +1,4 @@ -status = info +status = debug packages = io.opentelemetry.instrumentation.log4j.appender.v2_17 appenders = console, METRICS @@ -18,8 +18,3 @@ appender.METRICS.captureContextDataAttributes = * rootLogger.level = info rootLogger.appenderRefs = stderr rootLogger.appenderRef.stderr.ref = STDERR - -logger.MetricsLogger.name = MetricsLogger -logger.MetricsLogger.level = info -logger.MetricsLogger.additivity = false -logger.MetricsLogger.appenderRef.METRICS.ref = METRICS From f3c007707109f70dc8c92aa774a6d84d03cedd54 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 27 Nov 2023 15:56:52 -0500 Subject: [PATCH 03/94] Get gradle files and docker-compose in order to support otlp exports to the collector to prometheus, zipkin, etc Signed-off-by: Greg Schohn --- .../captureKafkaOffloader/build.gradle | 10 +-- .../src/main/docker/docker-compose.yml | 80 +++++++++++++++---- .../docker/otel-collector-config-demo.yaml | 50 ++++++++++++ .../src/main/docker/prometheus.yaml | 6 ++ TrafficCapture/nettyWireLogging/build.gradle | 14 +++- .../trafficCaptureProxyServer/build.gradle | 5 ++ 6 files changed, 143 insertions(+), 22 deletions(-) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index d7bb474e9..0b516f677 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -10,23 +10,23 @@ repositories { dependencies { api 'io.netty:netty-buffer:4.1.100.Final' + implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation project(':captureOffloader') implementation project(':coreUtilities') - implementation group: 'com.google.protobuf', name:'protobuf-java', version:'3.22.2' implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' implementation group: 'org.projectlombok', name:'lombok', version:'1.18.26' implementation group: 'org.apache.kafka', name:'kafka-clients', version:'3.6.0' - implementation group: 'software.amazon.msk', name:'aws-msk-iam-auth', version:'1.1.9' - implementation group: 'org.slf4j', name:'slf4j-api', version:'2.0.7' + implementation group: 'software.amazon.msk', name:'aws-msk-iam-auth', version:'1.1.9' 
testImplementation project(':captureProtobufs') - testImplementation 'org.mockito:mockito-core:4.6.1' - testImplementation 'org.mockito:mockito-junit-jupiter:4.6.1' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' + testImplementation group: 'org.mockito', name: 'mockito-core', version: '4.6.1' + testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '4.6.1' testImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' } diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index 283f42ded..e220c357d 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -1,6 +1,58 @@ version: '3.7' services: + + prometheus: + container_name: prometheus + image: prom/prometheus:latest + networks: + - migrations + volumes: + - ./prometheus.yaml:/etc/prometheus/prometheus.yml + ports: + - "9090:9090" + + # Jaeger + jaeger-all-in-one: + image: jaegertracing/all-in-one:latest + networks: + - migrations + ports: + - "16686:16686" + - "14268" + - "14250:14250" + environment: + - COLLECTOR_OTLP_ENABLED=true + + # Zipkin + zipkin-all-in-one: + image: openzipkin/zipkin:latest + networks: + - migrations + ports: + - "9411:9411" + + # Collector + otel-collector: + image: otel/opentelemetry-collector:latest +# command: ["--config=/etc/otel-collector-config-demo.yaml", "${OTELCOL_ARGS}"] + networks: + - migrations + volumes: +# - ./otel-collector-config-demo.yaml:/etc/otel-collector-config-demo.yaml + - ./otel-collector-config-demo.yaml:/etc/otelcol/config.yaml + - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs + ports: + - "1888:1888" # pprof extension + - "8888:8888" # Prometheus metrics exposed by the collector + - "8889:8889" # Prometheus exporter metrics + - "13133:13133" # health_check extension + - "55679:55679" # zpages extension + - "4317:4317" # otlp receiver + depends_on: + - jaeger-all-in-one + - zipkin-all-in-one + # Run combined instance of Capture Proxy and Elasticsearch capture-proxy-es: image: 'migrations/capture_proxy:latest' @@ -114,20 +166,20 @@ services: depends_on: - opensearchanalytics - otel-collector: - image: public.ecr.aws/a0w2c5q7/otelcol-with-opensearch:latest - container_name: otel-collector - ports: - - "4317:4317" - - "13133:13133" - volumes: - - ./otelcol/otel-config.yml:/etc/otel-config.yml - - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs - networks: - - migrations - depends_on: - - opensearchanalytics - command: tail -f /dev/null +# otel-collector: +# image: public.ecr.aws/a0w2c5q7/otelcol-with-opensearch:latest +# container_name: otel-collector +# ports: +# - "4317:4317" +# - "13133:13133" +# volumes: +# - ./otelcol/otel-config.yml:/etc/otel-config.yml +# - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs +# networks: +# - migrations +# depends_on: +# - opensearchanalytics +# command: tail -f /dev/null migration-console: image: 'migrations/migration_console:latest' diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml new file mode 
100644 index 000000000..92582f7e8 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml @@ -0,0 +1,50 @@ +receivers: + otlp: + protocols: + grpc: + +exporters: + prometheus: + endpoint: "0.0.0.0:8889" + namespace: capturereplay + const_labels: + label1: value1 + logging: + loglevel: debug + + zipkin: + endpoint: "http://zipkin-all-in-one:9411/api/v2/spans" + format: proto + + otlp/jaeger: + endpoint: jaeger-all-in-one:14250 + tls: + insecure: true + +# Alternatively, use jaeger_thrift_http with the settings below. In this case +# update the list of exporters on the traces pipeline. +# +# jaeger_thrift_http: +# url: http://jaeger-all-in-one:14268/api/traces + +processors: + batch: + +extensions: + health_check: + pprof: + endpoint: :1888 + zpages: + endpoint: :55679 + +service: + extensions: [pprof, zpages, health_check] + pipelines: + traces: + receivers: [otlp] + processors: [batch] + exporters: [logging, zipkin, otlp/jaeger] + metrics: + receivers: [otlp] + processors: [batch] + exporters: [logging, prometheus] diff --git a/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml b/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml new file mode 100644 index 000000000..ddea76205 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml @@ -0,0 +1,6 @@ +scrape_configs: + - job_name: 'otel-collector' + scrape_interval: 2s + static_configs: + - targets: ['otel-collector:8889'] + - targets: ['otel-collector:8888'] diff --git a/TrafficCapture/nettyWireLogging/build.gradle b/TrafficCapture/nettyWireLogging/build.gradle index ade3744c9..1a3c2bb64 100644 --- a/TrafficCapture/nettyWireLogging/build.gradle +++ b/TrafficCapture/nettyWireLogging/build.gradle @@ -8,20 +8,28 @@ plugins { } dependencies { + implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.netty:netty-bom:4.1.100.Final") + implementation project(':captureOffloader') implementation project(':coreUtilities') api group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' + + implementation group: 'io.opentelemetry', name:'opentelemetry-api' implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testImplementation project(':captureProtobufs') - testImplementation group: 'org.apache.httpcomponents.client5', name: 'httpclient5', version: '5.2.1' - testImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testImplementation group: 'com.google.guava', name: 'guava', version: '32.0.1-jre' testImplementation group: 'com.google.protobuf', name: 'protobuf-java', version:'3.22.2' - + testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' + testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' + testImplementation group: 'org.apache.httpcomponents.client5', name: 'httpclient5', version: '5.2.1' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' + testImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testImplementation testFixtures(project(path: ':testUtilities')) + + } diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index f9f14abe4..05f00f8b8 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ 
b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -38,9 +38,14 @@ dependencies { implementation group: 'com.beust', name: 'jcommander', version: '1.82' implementation 'com.google.protobuf:protobuf-java:3.22.2' + implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' + implementation group: 'io.opentelemetry', name: 'opentelemetry-sdk', version: '1.32.0' + testImplementation project(':captureProtobufs') testImplementation testFixtures(project(path: ':testUtilities')) testImplementation testFixtures(project(path: ':captureOffloader')) + + testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing', version: '1.32.0' } tasks.withType(Tar){ From 7fb8e2e9a2a2e5062fbf0c67ac48cef95b02f302 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 27 Nov 2023 17:06:30 -0500 Subject: [PATCH 04/94] WIP Signed-off-by: Greg Schohn --- .../captureKafkaOffloader/build.gradle | 2 +- .../kafkaoffloader/KafkaCaptureFactory.java | 89 +++++++++++++------ .../migrations/coreutils/MetricsLogger.java | 8 +- .../src/main/docker/docker-compose.yml | 52 +++++------ TrafficCapture/nettyWireLogging/build.gradle | 4 +- ...allyReliableLoggingHttpRequestHandler.java | 22 ++++- .../netty/LoggingHttpRequestHandler.java | 41 ++++++++- .../netty/LoggingHttpResponseHandler.java | 49 ++++++++-- ...ReliableLoggingHttpRequestHandlerTest.java | 22 ++++- .../netty/ProxyChannelInitializer.java | 11 ++- .../src/main/resources/logging.properties | 9 ++ 11 files changed, 233 insertions(+), 76 deletions(-) create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/main/resources/logging.properties diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index 0b516f677..65e332890 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -9,12 +9,12 @@ repositories { } dependencies { - api 'io.netty:netty-buffer:4.1.100.Final' implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") implementation project(':captureOffloader') implementation project(':coreUtilities') implementation group: 'com.google.protobuf', name:'protobuf-java', version:'3.22.2' + api group:'io.netty', name:'netty-buffer', version: '4.1.100.Final' implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' implementation group: 'org.projectlombok', name:'lombok', version:'1.18.26' implementation group: 'org.apache.kafka', name:'kafka-clients', version:'3.6.0' diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index acbe92c10..a3696fefe 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -2,11 +2,9 @@ import com.google.protobuf.CodedOutputStream; import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.trace.Span; import io.opentelemetry.context.Context; -import io.opentelemetry.context.Scope; +import io.opentelemetry.context.ContextKey; import lombok.AllArgsConstructor; import 
lombok.NonNull; import lombok.extern.slf4j.Slf4j; @@ -28,19 +26,26 @@ import java.time.Duration; import java.time.Instant; import java.util.Arrays; +import java.util.Optional; import java.util.concurrent.CompletableFuture; @Slf4j public class KafkaCaptureFactory implements IConnectionCaptureFactory { + private static final ContextKey RECORD_ID_KEY = ContextKey.named("recordId"); + private static final ContextKey TOPIC_KEY = ContextKey.named("topic"); + private static final ContextKey RECORD_SIZE_KEY = ContextKey.named("recordSize"); + public static final String TELEMETRY_SCOPE_NAME = "KafkaCapture"; + public static final Optional METERING_CLOSURE_OP = + Optional.of(new MetricsLogger.SimpleMeteringClosure(TELEMETRY_SCOPE_NAME)); + private static final MetricsLogger metricsLogger = new MetricsLogger("BacksideHandler"); private static final String DEFAULT_TOPIC_NAME_FOR_TRAFFIC = "logging-traffic-topic"; // This value encapsulates overhead we should reserve for a given Producer record to account for record key bytes and // general Kafka message overhead public static final int KAFKA_MESSAGE_OVERHEAD_BYTES = 500; - public static final String TELEMETRY_SCOPE_NAME = "KafkaCaptureFactory"; private final String nodeId; // Potential future optimization here to use a direct buffer (e.g. nio) instead of byte array @@ -62,22 +67,24 @@ public KafkaCaptureFactory(String nodeId, Producer producer, int @Override public IChannelConnectionCaptureSerializer createOffloader(String connectionId) { - var tracer = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME); - Span connectionSpan = tracer.spanBuilder("connection").startSpan(); - - try (var namedOnlyForAutoClose = Context.current().with(connectionSpan).makeCurrent()) { - var meter = GlobalOpenTelemetry.get().getMeter(TELEMETRY_SCOPE_NAME); - meter.counterBuilder("connection_created").build().add(1); - } - + var context = METERING_CLOSURE_OP.map(m->{ + Span offloaderSpan = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("offloader").startSpan(); + offloaderSpan.setAttribute("offloaderConnectionId", connectionId); + var c = Context.current().with(offloaderSpan); + m.meterIncrementEvent(c, "offloader_created"); + m.meterDeltaEvent(c, "offloaders_active", 1); + return c; + }).orElse(null); return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, - new StreamManager(connectionSpan, connectionId)); + new StreamManager(context, connectionId)); } @AllArgsConstructor static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { private final CodedOutputStream codedOutputStream; private final ByteBuffer byteBuffer; + final Context streamContext; @Override public @NonNull CodedOutputStream getOutputStream() { return codedOutputStream; @@ -85,32 +92,37 @@ static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { } class StreamManager extends OrderedStreamLifecyleManager { - Span telemetrySpan; + Context telemetryContext; String connectionId; Instant startTime; - public StreamManager(Span telemetrySpan, String connectionId) { - this.telemetrySpan = telemetrySpan; + public StreamManager(Context incomingTelemetryContext, String connectionId) { + this.telemetryContext = incomingTelemetryContext; this.connectionId = connectionId; this.startTime = Instant.now(); } @Override public void close() throws IOException { - try (var namedOnlyForAutoClose = Context.current().with(telemetrySpan).makeCurrent()) { - var histogram = GlobalOpenTelemetry.get().getMeter(TELEMETRY_SCOPE_NAME) - 
.histogramBuilder("connection_lifetime").build(); - telemetrySpan.setAttribute("connectionId", connectionId); - histogram.record((double) Duration.between(startTime, Instant.now()).toMillis(), Attributes.empty(), - Context.current().with(telemetrySpan)); - telemetrySpan.end(); - } + METERING_CLOSURE_OP.ifPresent(m->{ + m.meterHistogramMillis(telemetryContext, "connection_lifetime", + Duration.between(startTime, Instant.now())); + m.meterDeltaEvent(telemetryContext, "offloaders_active", -1); + m.meterIncrementEvent(telemetryContext, "offloader_closed"); + }); + Span.fromContext(telemetryContext).end(); } @Override public CodedOutputStreamWrapper createStream() { + var newStreamCtx = METERING_CLOSURE_OP.map(m-> { + m.meterIncrementEvent(telemetryContext, "stream_created"); + try (var scope = telemetryContext.makeCurrent()) { + return Context.current().with(m.tracer.spanBuilder("recordStream").startSpan()); + } + }).orElse(null); ByteBuffer bb = ByteBuffer.allocate(bufferSize); - return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb); + return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb, newStreamCtx); } @Override @@ -131,8 +143,20 @@ public CodedOutputStreamWrapper createStream() { // Used to essentially wrap Future returned by Producer to CompletableFuture var cf = new CompletableFuture(); log.debug("Sending Kafka producer record: {} for topic: {}", recordId, topicNameForTraffic); + + var flushContext = METERING_CLOSURE_OP.map(m-> { + Span.fromContext(osh.streamContext).end(); + try (var scope = telemetryContext + .with(RECORD_ID_KEY, recordId) + .with(TOPIC_KEY, topicNameForTraffic) + .with(RECORD_SIZE_KEY, kafkaRecord.value().length).makeCurrent()) { + m.meterIncrementEvent(telemetryContext, "stream_flush_called"); + return Context.current().with(m.tracer.spanBuilder("flushRecord").startSpan()); + } + }).orElse(null); + // Async request to Kafka cluster - producer.send(kafkaRecord, handleProducerRecordSent(cf, recordId)); + producer.send(kafkaRecord, handleProducerRecordSent(cf, recordId, flushContext)); metricsLogger.atSuccess(MetricsEvent.RECORD_SENT_TO_KAFKA) .setAttribute(MetricsAttributeKey.CHANNEL_ID, connectionId) .setAttribute(MetricsAttributeKey.TOPIC_NAME, topicNameForTraffic) @@ -157,8 +181,18 @@ public CodedOutputStreamWrapper createStream() { * retried or not retried at all: https://kafka.apache.org/35/javadoc/org/apache/kafka/common/errors/RetriableException.html * as well as basic retry backoff */ - private Callback handleProducerRecordSent(CompletableFuture cf, String recordId) { + private Callback handleProducerRecordSent(CompletableFuture cf, String recordId, + Context flushContext) { return (metadata, exception) -> { + METERING_CLOSURE_OP.ifPresent(m-> { + m.meterIncrementEvent(telemetryContext, + exception==null ? "stream_flush_success" : "stream_flush_failure"); + m.meterIncrementEvent(telemetryContext, + exception==null ? 
"stream_flush_success_bytes" : "stream_flush_failure_bytes", + flushContext.get(RECORD_SIZE_KEY)); + Span.fromContext(flushContext).end(); + }); + if (exception != null) { log.error("Error sending producer record: {}", recordId, exception); cf.completeExceptionally(exception); @@ -170,5 +204,4 @@ private Callback handleProducerRecordSent(CompletableFuture cf, }; } } - } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java index 23f73aa29..ea116fc47 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java @@ -129,7 +129,7 @@ public void meterHistogram(Context ctx, String eventName, double value) { * metricsLogger.atSuccess().addKeyValue("key", "value").setMessage("Task succeeded").log(); */ public MetricsLogBuilder atSuccess(MetricsEvent event) { - return new MetricsLogBuilder().atSuccess(event); + return new MetricsLogBuilder(logger).atSuccess(event); } /** @@ -142,7 +142,7 @@ public MetricsLogBuilder atError(MetricsEvent event, Throwable cause) { if (cause == null) { return atError(event); } - return new MetricsLogBuilder().atError(event) + return new MetricsLogBuilder(logger).atError(event) .setAttribute(MetricsAttributeKey.EXCEPTION_MESSAGE, cause.getMessage()) .setAttribute(MetricsAttributeKey.EXCEPTION_TYPE, cause.getClass().getName()); } @@ -153,10 +153,10 @@ public MetricsLogBuilder atError(MetricsEvent event, Throwable cause) { */ public MetricsLogBuilder atError(MetricsEvent event) { - return new MetricsLogBuilder().atError(event); + return new MetricsLogBuilder(logger).atError(event); } public MetricsLogBuilder atTrace(MetricsEvent event) { - return new MetricsLogBuilder().atTrace(event); + return new MetricsLogBuilder(logger).atTrace(event); } } diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index e220c357d..98625caa9 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -54,40 +54,40 @@ services: - zipkin-all-in-one # Run combined instance of Capture Proxy and Elasticsearch - capture-proxy-es: +# capture-proxy-es: +# image: 'migrations/capture_proxy:latest' +# networks: +# - migrations +# ports: +# - "9200:9200" +# - "19200:19200" +# volumes: +# - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs +# environment: +# - http.port=19200 +# # Run processes for elasticsearch and capture proxy, and exit if either one ends +# command: /bin/sh -c '/usr/local/bin/docker-entrypoint.sh eswrapper & /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://localhost:19200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317 & wait -n 1' +# depends_on: +# - kafka + +# Run separate instances of Capture Proxy and Elasticsearch + capture-proxy: image: 'migrations/capture_proxy:latest' networks: - migrations ports: - "9200:9200" - - "19200:19200" - volumes: - - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs - environment: - - http.port=19200 - # Run processes 
for elasticsearch and capture proxy, and exit if either one ends - command: /bin/sh -c '/usr/local/bin/docker-entrypoint.sh eswrapper & /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://localhost:19200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317 & wait -n 1' + command: /bin/sh -c "/runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://elasticsearch:9200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317" depends_on: - kafka + - elasticsearch -# Run separate instances of Capture Proxy and Elasticsearch -# capture-proxy: -# image: 'migrations/capture_proxy:latest' -# networks: -# - migrations -# ports: -# - "9200:9200" -# command: /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://elasticsearch:9200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml -# depends_on: -# - kafka -# - elasticsearch -# -# elasticsearch: -# image: 'migrations/elasticsearch_searchguard:latest' -# networks: -# - migrations -# ports: -# - '19200:9200' + elasticsearch: + image: 'migrations/elasticsearch_searchguard:latest' + networks: + - migrations + ports: + - '19200:9200' zookeeper: image: docker.io/bitnami/zookeeper:3.8 diff --git a/TrafficCapture/nettyWireLogging/build.gradle b/TrafficCapture/nettyWireLogging/build.gradle index 1a3c2bb64..523fe0078 100644 --- a/TrafficCapture/nettyWireLogging/build.gradle +++ b/TrafficCapture/nettyWireLogging/build.gradle @@ -13,7 +13,9 @@ dependencies { implementation project(':captureOffloader') implementation project(':coreUtilities') - api group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' + api group: 'io.netty', name: 'netty-buffer' + api group: 'io.netty', name: 'netty-codec-http' + api group: 'io.netty', name: 'netty-handler' implementation group: 'io.opentelemetry', name:'opentelemetry-api' implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java index 7b5161ed6..52e745230 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java @@ -3,6 +3,8 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpRequest; import io.netty.util.ReferenceCountUtil; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.context.Context; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; @@ -13,9 +15,10 @@ public class ConditionallyReliableLoggingHttpRequestHandler extends LoggingHttpRequestHandler { private final Predicate shouldBlockPredicate; - 
public ConditionallyReliableLoggingHttpRequestHandler(IChannelConnectionCaptureSerializer trafficOffloader, + public ConditionallyReliableLoggingHttpRequestHandler(Context incomingContext, + IChannelConnectionCaptureSerializer trafficOffloader, Predicate headerPredicateForWhenToBlock) { - super(trafficOffloader); + super(incomingContext, trafficOffloader); this.shouldBlockPredicate = headerPredicateForWhenToBlock; } @@ -23,12 +26,22 @@ public ConditionallyReliableLoggingHttpRequestHandler(IChannelConnectionCaptureS protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { if (shouldBlockPredicate.test(httpRequest)) { + var blockingSpan = METERING_CLOSURE_OP.map(m->{ + m.meterIncrementEvent(telemetryContext, "blockingRequestUntilFlush"); + try (var namedOnlyForAutoClose = telemetryContext.makeCurrent()) { + return GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("blockedOnFlush").startSpan(); + }}).orElse(null); trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { + METERING_CLOSURE_OP.ifPresent(m->{ + blockingSpan.end(); + m.meterIncrementEvent(telemetryContext, t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); + }); if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users // could set as needed. Some users may be fine with just logging a failed offloading of a request // where other users may want to stop entirely. JIRA here: https://opensearch.atlassian.net/browse/MIGRATIONS-1276 - log.warn("Got error: " + t.getMessage()); + log.warn("Dropping request - Got error: " + t.getMessage()); ReferenceCountUtil.release(msg); } else { try { @@ -39,6 +52,9 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob } }); } else { + METERING_CLOSURE_OP.ifPresent(m->{ + m.meterIncrementEvent(telemetryContext, "nonBlockingRequest"); + }); super.channelFinishedReadingAnHttpMessage(ctx, msg, httpRequest); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index de5b0256e..f47b7de8a 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -13,6 +13,9 @@ import io.netty.handler.codec.http.HttpRequestDecoder; import io.netty.handler.codec.http.HttpVersion; import io.netty.handler.codec.http.LastHttpContent; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Context; import lombok.Getter; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; @@ -22,9 +25,13 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import java.time.Instant; +import java.util.Optional; @Slf4j public class LoggingHttpRequestHandler extends ChannelInboundHandlerAdapter { + public static final String TELEMETRY_SCOPE_NAME = "LoggingHttpInboundHandler"; + public static final Optional METERING_CLOSURE_OP = + Optional.of(new MetricsLogger.SimpleMeteringClosure(TELEMETRY_SCOPE_NAME)); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); static class SimpleHttpRequestDecoder 
extends HttpRequestDecoder { @@ -72,9 +79,21 @@ public HttpRequest resetCurrentRequest() { protected final EmbeddedChannel httpDecoderChannel; protected final SimpleHttpRequestDecoder requestDecoder; - - - public LoggingHttpRequestHandler(IChannelConnectionCaptureSerializer trafficOffloader) { + protected final Context telemetryContext; + private final Instant createdTime; + + + public LoggingHttpRequestHandler(Context incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { + this.createdTime = Instant.now(); + telemetryContext = METERING_CLOSURE_OP.map(m->{ + try (var scope = incomingContext.makeCurrent()) { + var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("frontendConnection").startSpan(); + var ctx = incomingContext.with(span); + m.meterIncrementEvent(ctx, "requestStarted"); + return ctx; + } + }).orElse(null); this.trafficOffloader = trafficOffloader; requestDecoder = new SimpleHttpRequestDecoder(); // as a field for easier debugging httpDecoderChannel = new EmbeddedChannel( @@ -85,9 +104,12 @@ public LoggingHttpRequestHandler(IChannelConnectionCaptureSerializer trafficO private HttpProcessedState parseHttpMessageParts(ByteBuf msg) { httpDecoderChannel.writeInbound(msg); // Consume this outright, up to the caller to know what else to do - return getHandlerThatHoldsParsedHttpRequest().isDone ? + var state = getHandlerThatHoldsParsedHttpRequest().isDone ? HttpProcessedState.FULL_MESSAGE : HttpProcessedState.ONGOING; + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, + state == HttpProcessedState.FULL_MESSAGE ? "requestFullyParsed" : "requestPartiallyParsed")); + return state; } private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @@ -97,6 +119,7 @@ private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "unregistered")); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -113,6 +136,10 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { + METERING_CLOSURE_OP.ifPresent(m->{ + m.meterIncrementEvent(telemetryContext, "handlerRemoved"); + Span.fromContext(telemetryContext).end(); + }); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -128,6 +155,7 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { super.channelRead(ctx, msg); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "requestReceived")); metricsLogger.atSuccess(MetricsEvent.RECEIVED_FULL_HTTP_REQUEST) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()) .setAttribute(MetricsAttributeKey.HTTP_METHOD, httpRequest.method().toString()) @@ -141,6 +169,10 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { var bb = ((ByteBuf) msg).retainedDuplicate(); trafficOffloader.addReadEvent(timestamp, bb); + METERING_CLOSURE_OP.ifPresent(m-> { + m.meterIncrementEvent(telemetryContext, 
"read"); + m.meterIncrementEvent(telemetryContext, "readBytes", bb.readableBytes()); + }); metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); @@ -164,6 +196,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "exception")); httpDecoderChannel.close(); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java index 66d8912a4..9bb33fd22 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java @@ -4,6 +4,9 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelOutboundHandlerAdapter; import io.netty.channel.ChannelPromise; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Context; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; @@ -11,66 +14,102 @@ import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import java.net.SocketAddress; +import java.time.Duration; import java.time.Instant; +import java.util.Optional; @Slf4j public class LoggingHttpResponseHandler extends ChannelOutboundHandlerAdapter { - - private final IChannelConnectionCaptureSerializer trafficOffloader; + public static final String TELEMETRY_SCOPE_NAME = "LoggingHttpOutboundHandler"; + public static final Optional METERING_CLOSURE_OP = + Optional.of(new MetricsLogger.SimpleMeteringClosure(TELEMETRY_SCOPE_NAME)); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpResponseHandler"); + private final IChannelConnectionCaptureSerializer trafficOffloader; + private Context telemetryContext; + private Instant connectTime; - public LoggingHttpResponseHandler(IChannelConnectionCaptureSerializer trafficOffloader) { + public LoggingHttpResponseHandler(Context incomingContext, + IChannelConnectionCaptureSerializer trafficOffloader) { this.trafficOffloader = trafficOffloader; + this.telemetryContext = incomingContext; } @Override public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) throws Exception { trafficOffloader.addBindEvent(Instant.now(), localAddress); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "bind")); super.bind(ctx, localAddress, promise); } @Override public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); + + METERING_CLOSURE_OP.ifPresent(m->{ + var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("backendConnection").startSpan(); + telemetryContext = telemetryContext.with(span); + 
connectTime = Instant.now(); + + m.meterIncrementEvent(telemetryContext, "connect"); + m.meterDeltaEvent(telemetryContext, "connections", 1); + }); + super.connect(ctx, remoteAddress, localAddress, promise); } @Override public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { trafficOffloader.addDisconnectEvent(Instant.now()); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "disconnect")); super.disconnect(ctx, promise); } @Override public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - super.close(ctx, promise); + + METERING_CLOSURE_OP.ifPresent(m-> { + m.meterIncrementEvent(telemetryContext, "close"); + m.meterDeltaEvent(telemetryContext, "connections", -1); + m.meterHistogramMillis(telemetryContext, "connectionDuration", + Duration.between(connectTime, Instant.now())); + Span.fromContext(telemetryContext).end(); + }); } @Override public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { trafficOffloader.addDeregisterEvent(Instant.now()); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "deregister")); super.deregister(ctx, promise); } @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { - trafficOffloader.addWriteEvent(Instant.now(), (ByteBuf) msg); + var bb = (ByteBuf) msg; + trafficOffloader.addWriteEvent(Instant.now(), bb); metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); + METERING_CLOSURE_OP.ifPresent(m->{ + m.meterIncrementEvent(telemetryContext, "write"); + m.meterIncrementEvent(telemetryContext, "writeBytes", bb.readableBytes()); + }); super.write(ctx, msg, promise); } @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { flush(ctx); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "removed")); super.handlerRemoved(ctx); } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); + METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "exception")); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index 3110d0188..76a2cc762 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -3,10 +3,19 @@ import com.google.protobuf.CodedOutputStream; import io.netty.buffer.ByteBuf; import io.netty.channel.embedded.EmbeddedChannel; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; +import io.opentelemetry.sdk.trace.data.SpanData; import 
lombok.AllArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import org.opensearch.migrations.testutils.TestUtilities; @@ -33,6 +42,10 @@ @Slf4j public class ConditionallyReliableLoggingHttpRequestHandlerTest { + @RegisterExtension + static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); + private final Tracer tracer = otelTesting.getOpenTelemetry().getTracer("test"); + private final Meter meter = otelTesting.getOpenTelemetry().getMeter("test"); @AllArgsConstructor static class StreamManager extends OrderedStreamLifecyleManager { @@ -74,8 +87,12 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumertrue)); // true: block every request + new ConditionallyReliableLoggingHttpRequestHandler(telemetryCtx, offloader, + x->true)); // true: block every request channelWriter.accept(channel); // we wrote the correct data to the downstream handler/channel @@ -98,6 +115,9 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer extends ChannelInitializer { + static final ContextKey CONNECTION_ID_KEY = ContextKey.named("connectionId"); private final IConnectionCaptureFactory connectionCaptureFactory; private final Supplier sslEngineProvider; @@ -41,9 +44,11 @@ protected void initChannel(SocketChannel ch) throws IOException { ch.pipeline().addLast(new SslHandler(sslEngineProvider.get())); } - var offloader = connectionCaptureFactory.createOffloader(ch.id().asLongText()); - ch.pipeline().addLast(new LoggingHttpResponseHandler(offloader)); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpRequestHandler(offloader, + var connectionId = ch.id().asLongText(); + var offloader = connectionCaptureFactory.createOffloader(connectionId); + var ctx = Context.current().with(CONNECTION_ID_KEY, connectionId); + ch.pipeline().addLast(new LoggingHttpResponseHandler<>(ctx, offloader)); + ch.pipeline().addLast(new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, this::shouldGuaranteeMessageOffloading)); ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/resources/logging.properties b/TrafficCapture/trafficCaptureProxyServer/src/main/resources/logging.properties new file mode 100644 index 000000000..42fe83cdd --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/resources/logging.properties @@ -0,0 +1,9 @@ +# Set the global logging level for all loggers +.level=FINE + +# Configure the console handler (or other handlers if you use them) +handlers=java.util.logging.ConsoleHandler + +# Set the logging level for the console handler +java.util.logging.ConsoleHandler.level=FINE +java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter \ No newline at end of file From a8ae3d12617cdc067795033c6cbb39666fcb8de6 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 27 Nov 2023 22:31:30 -0500 Subject: [PATCH 05/94] Restore the docker-compose single-node/multi-node split docker-compose config hierarchy. This was broken from the merge https://github.com/opensearch-project/opensearch-migrations/pull/376/files#diff-430f89dc33402ecf692b9a8372f66e585bb2f9215596433216580efc2a56795c. 
Signed-off-by: Greg Schohn --- .../src/main/docker/docker-compose-single.yml | 4 +-- .../src/main/docker/docker-compose.yml | 36 ------------------- 2 files changed, 2 insertions(+), 38 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose-single.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose-single.yml index febda3d42..0ae8ffbcb 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose-single.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose-single.yml @@ -14,7 +14,7 @@ services: - http.port=19200 - discovery.type=single-node # Run processes for elasticsearch and capture proxy, and exit if either one ends - command: /bin/sh -c '/usr/local/bin/docker-entrypoint.sh eswrapper & /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://localhost:19200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml & wait -n 1' + command: /bin/sh -c '/usr/local/bin/docker-entrypoint.sh eswrapper & /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://localhost:19200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317 & wait -n 1' depends_on: - kafka @@ -25,7 +25,7 @@ services: # - migrations # ports: # - "9200:9200" -# command: /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://elasticsearch:9200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml +# command: /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://elasticsearch:9200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317 --otelCollectorEndpoint http://otel-collector:4317 # depends_on: # - kafka # - elasticsearch diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index 98625caa9..ef31558c7 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -53,42 +53,6 @@ services: - jaeger-all-in-one - zipkin-all-in-one - # Run combined instance of Capture Proxy and Elasticsearch -# capture-proxy-es: -# image: 'migrations/capture_proxy:latest' -# networks: -# - migrations -# ports: -# - "9200:9200" -# - "19200:19200" -# volumes: -# - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs -# environment: -# - http.port=19200 -# # Run processes for elasticsearch and capture proxy, and exit if either one ends -# command: /bin/sh -c '/usr/local/bin/docker-entrypoint.sh eswrapper & /runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://localhost:19200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317 & wait -n 1' -# depends_on: -# - kafka - -# Run separate instances of Capture Proxy and Elasticsearch - capture-proxy: - image: 
'migrations/capture_proxy:latest' - networks: - - migrations - ports: - - "9200:9200" - command: /bin/sh -c "/runJavaWithClasspath.sh org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy --kafkaConnection kafka:9092 --destinationUri https://elasticsearch:9200 --insecureDestination --listenPort 9200 --sslConfigFile /usr/share/elasticsearch/config/proxy_tls.yml --otelCollectorEndpoint http://otel-collector:4317" - depends_on: - - kafka - - elasticsearch - - elasticsearch: - image: 'migrations/elasticsearch_searchguard:latest' - networks: - - migrations - ports: - - '19200:9200' - zookeeper: image: docker.io/bitnami/zookeeper:3.8 networks: From da9d36bf8fe889320fe5bde3ea27b12af32b5904 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 27 Nov 2023 22:32:18 -0500 Subject: [PATCH 06/94] Add labels to each metric instrument so that multiple values can be plotted within the same graph in prometheus. Signed-off-by: Greg Schohn --- .../migrations/coreutils/MetricsLogger.java | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java index ea116fc47..f516c9de3 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.Tracer; @@ -101,13 +102,19 @@ public void meterIncrementEvent(Context ctx, String eventName) { public void meterIncrementEvent(Context ctx, String eventName, long increment) { if (ctx == null) { return; } try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.counterBuilder(eventName).build().add(increment); + meter.counterBuilder(eventName) + .build().add(increment, Attributes.builder() + .put("labelName", eventName) + .build()); } } public void meterDeltaEvent(Context ctx, String eventName, long delta) { if (ctx == null) { return; } try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.upDownCounterBuilder(eventName).build().add(delta); + meter.upDownCounterBuilder(eventName) + .build().add(delta, Attributes.builder() + .put("labelName", eventName) + .build()); } } public void meterHistogramMillis(Context ctx, String eventName, Duration between) { @@ -116,7 +123,10 @@ public void meterHistogramMillis(Context ctx, String eventName, Duration between public void meterHistogram(Context ctx, String eventName, double value) { if (ctx == null) { return; } try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.histogramBuilder(eventName).build().record(value); + meter.histogramBuilder(eventName) + .build().record(value, Attributes.builder() + .put("labelName", eventName) + .build()); } } } From 06618caa8e31faa65acccf311f293978854cd7c3 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 27 Nov 2023 23:13:49 -0500 Subject: [PATCH 07/94] Move the MetricsClosure into its own class and stop stuffing the metrics into an optional. Dropping the optionals makes the code simpler and if we don't want to do logging, we can just not fill in the configuration for the SDK. 
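
As a concrete illustration of that point (this sketch is not part of the diff below, and the class and event names in it are made up for the example): SimpleMeteringClosure resolves its Meter and Tracer through GlobalOpenTelemetry, so leaving initializeOpenTelemetry(...) uncalled leaves the global instance as a no-op and a plain, non-Optional field stays safe to call. When the OTLP SDK is registered, the same calls export instruments carrying the "labelName" attribute added in the previous commit, which is what lets Prometheus plot several event series on one graph.

    // Hedged sketch only - mirrors the call pattern used by the handlers in this patch series.
    import io.opentelemetry.context.Context;
    import org.opensearch.migrations.coreutils.SimpleMeteringClosure;

    class MeteringClosureSketch {
        // Plain field; no Optional wrapper is needed because the closure is harmless even when
        // no OpenTelemetry SDK has been configured (GlobalOpenTelemetry defaults to no-op).
        private static final SimpleMeteringClosure METERING_CLOSURE =
                new SimpleMeteringClosure("SketchScope");

        public static void main(String[] args) {
            // Uncommenting the next line turns the no-ops into real OTLP exports (endpoint is illustrative):
            // SimpleMeteringClosure.initializeOpenTelemetry("sketch-service", "http://otel-collector:4317");
            var ctx = Context.current();
            METERING_CLOSURE.meterIncrementEvent(ctx, "sketchEvent");      // monotonic counter, +1
            METERING_CLOSURE.meterDeltaEvent(ctx, "sketchActiveItems", 1); // up/down counter
        }
    }

Collapsing the enable/disable decision into "was the SDK ever registered?" keeps every call site linear instead of threading Optionals through the Netty handlers.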
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 77 +++++------ .../migrations/coreutils/MetricsLogger.java | 90 ------------- .../coreutils/SimpleMeteringClosure.java | 123 ++++++++++++++++++ ...allyReliableLoggingHttpRequestHandler.java | 25 ++-- .../netty/LoggingHttpRequestHandler.java | 46 +++---- .../netty/LoggingHttpResponseHandler.java | 49 +++---- .../proxyserver/CaptureProxy.java | 4 +- .../migrations/replay/TrafficReplayer.java | 2 +- 8 files changed, 216 insertions(+), 200 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index a3696fefe..4a94d93d9 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -14,6 +14,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; @@ -26,7 +27,6 @@ import java.time.Duration; import java.time.Instant; import java.util.Arrays; -import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -37,8 +37,7 @@ public class KafkaCaptureFactory implements IConnectionCaptureFactory TOPIC_KEY = ContextKey.named("topic"); private static final ContextKey RECORD_SIZE_KEY = ContextKey.named("recordSize"); public static final String TELEMETRY_SCOPE_NAME = "KafkaCapture"; - public static final Optional METERING_CLOSURE_OP = - Optional.of(new MetricsLogger.SimpleMeteringClosure(TELEMETRY_SCOPE_NAME)); + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("BacksideHandler"); @@ -67,15 +66,13 @@ public KafkaCaptureFactory(String nodeId, Producer producer, int @Override public IChannelConnectionCaptureSerializer createOffloader(String connectionId) { - var context = METERING_CLOSURE_OP.map(m->{ - Span offloaderSpan = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("offloader").startSpan(); - offloaderSpan.setAttribute("offloaderConnectionId", connectionId); - var c = Context.current().with(offloaderSpan); - m.meterIncrementEvent(c, "offloader_created"); - m.meterDeltaEvent(c, "offloaders_active", 1); - return c; - }).orElse(null); + Span offloaderSpan = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("offloader").startSpan(); + offloaderSpan.setAttribute("offloaderConnectionId", connectionId); + var context = Context.current().with(offloaderSpan); + METERING_CLOSURE.meterIncrementEvent(context, "offloader_created"); + METERING_CLOSURE.meterDeltaEvent(context, "offloaders_active", 1); + return new 
StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager(context, connectionId)); } @@ -104,23 +101,23 @@ public StreamManager(Context incomingTelemetryContext, String connectionId) { @Override public void close() throws IOException { - METERING_CLOSURE_OP.ifPresent(m->{ - m.meterHistogramMillis(telemetryContext, "connection_lifetime", + METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connection_lifetime", Duration.between(startTime, Instant.now())); - m.meterDeltaEvent(telemetryContext, "offloaders_active", -1); - m.meterIncrementEvent(telemetryContext, "offloader_closed"); - }); + METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", -1); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_closed"); + Span.fromContext(telemetryContext).end(); } @Override public CodedOutputStreamWrapper createStream() { - var newStreamCtx = METERING_CLOSURE_OP.map(m-> { - m.meterIncrementEvent(telemetryContext, "stream_created"); - try (var scope = telemetryContext.makeCurrent()) { - return Context.current().with(m.tracer.spanBuilder("recordStream").startSpan()); - } - }).orElse(null); + Context newStreamCtx; + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_created"); + try (var scope = telemetryContext.makeCurrent()) { + newStreamCtx = Context.current() + .with(METERING_CLOSURE.tracer.spanBuilder("recordStream").startSpan()); + } + ByteBuffer bb = ByteBuffer.allocate(bufferSize); return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb, newStreamCtx); } @@ -144,16 +141,16 @@ public CodedOutputStreamWrapper createStream() { var cf = new CompletableFuture(); log.debug("Sending Kafka producer record: {} for topic: {}", recordId, topicNameForTraffic); - var flushContext = METERING_CLOSURE_OP.map(m-> { - Span.fromContext(osh.streamContext).end(); - try (var scope = telemetryContext - .with(RECORD_ID_KEY, recordId) - .with(TOPIC_KEY, topicNameForTraffic) - .with(RECORD_SIZE_KEY, kafkaRecord.value().length).makeCurrent()) { - m.meterIncrementEvent(telemetryContext, "stream_flush_called"); - return Context.current().with(m.tracer.spanBuilder("flushRecord").startSpan()); - } - }).orElse(null); + Context flushContext; + Span.fromContext(osh.streamContext).end(); + try (var scope = telemetryContext + .with(RECORD_ID_KEY, recordId) + .with(TOPIC_KEY, topicNameForTraffic) + .with(RECORD_SIZE_KEY, kafkaRecord.value().length).makeCurrent()) { + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); + flushContext = Context.current() + .with(METERING_CLOSURE.tracer.spanBuilder("flushRecord").startSpan()); + } // Async request to Kafka cluster producer.send(kafkaRecord, handleProducerRecordSent(cf, recordId, flushContext)); @@ -184,14 +181,12 @@ public CodedOutputStreamWrapper createStream() { private Callback handleProducerRecordSent(CompletableFuture cf, String recordId, Context flushContext) { return (metadata, exception) -> { - METERING_CLOSURE_OP.ifPresent(m-> { - m.meterIncrementEvent(telemetryContext, - exception==null ? "stream_flush_success" : "stream_flush_failure"); - m.meterIncrementEvent(telemetryContext, - exception==null ? "stream_flush_success_bytes" : "stream_flush_failure_bytes", - flushContext.get(RECORD_SIZE_KEY)); - Span.fromContext(flushContext).end(); - }); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, + exception==null ? "stream_flush_success" : "stream_flush_failure"); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, + exception==null ? 
"stream_flush_success_bytes" : "stream_flush_failure_bytes", + flushContext.get(RECORD_SIZE_KEY)); + Span.fromContext(flushContext).end(); if (exception != null) { log.error("Error sending producer record: {}", recordId, exception); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java index f516c9de3..ef61af2d6 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java @@ -4,7 +4,6 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.context.Context; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; @@ -42,95 +41,6 @@ public MetricsLogger(String source) { logger = LoggerFactory.getLogger(String.format("MetricsLogger.%s", source)); } - public static void initializeOpenTelemetry(String serviceName, String collectorEndpoint) { - var serviceResource = Resource.getDefault().toBuilder() - .put(ResourceAttributes.SERVICE_NAME, serviceName) - .build(); - - OpenTelemetrySdk openTelemetrySdk = - OpenTelemetrySdk.builder() - .setLoggerProvider( - SdkLoggerProvider.builder() - .setResource(serviceResource) - .addLogRecordProcessor( - BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.builder() - .setEndpoint(collectorEndpoint) - .build()) - .build()) - .build()) - .setTracerProvider( - SdkTracerProvider.builder() - .setResource(serviceResource) - .addSpanProcessor( - BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.builder() - .setEndpoint(collectorEndpoint) - .setTimeout(2, TimeUnit.SECONDS) - .build()) - .setScheduleDelay(100, TimeUnit.MILLISECONDS) - .build()) - .build()) - .setMeterProvider( - SdkMeterProvider.builder() - .setResource(serviceResource) - .registerMetricReader( - PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.builder() - .setEndpoint(collectorEndpoint) - .build()) - .setInterval(Duration.ofMillis(1000)) - .build()) - .build()) - .buildAndRegisterGlobal(); - - // Add hook to close SDK, which flushes logs - Runtime.getRuntime().addShutdownHook(new Thread(openTelemetrySdk::close)); - //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); - } - - public static class SimpleMeteringClosure { - public final Meter meter; - public final Tracer tracer; - public SimpleMeteringClosure(String scopeName) { - meter = GlobalOpenTelemetry.getMeter(scopeName); - tracer = GlobalOpenTelemetry.getTracer(scopeName); - } - public void meterIncrementEvent(Context ctx, String eventName) { - meterIncrementEvent(ctx, eventName, 1); - } - public void meterIncrementEvent(Context ctx, String eventName, long increment) { - if (ctx == null) { return; } - try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.counterBuilder(eventName) - .build().add(increment, Attributes.builder() - .put("labelName", eventName) - .build()); - } - } - public void meterDeltaEvent(Context ctx, String eventName, long delta) { - if (ctx == null) { return; } - try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.upDownCounterBuilder(eventName) - .build().add(delta, Attributes.builder() - .put("labelName", eventName) - .build()); - } - } - public void 
meterHistogramMillis(Context ctx, String eventName, Duration between) { - meterHistogram(ctx, eventName, (double) between.toMillis()); - } - public void meterHistogram(Context ctx, String eventName, double value) { - if (ctx == null) { return; } - try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.histogramBuilder(eventName) - .build().record(value, Attributes.builder() - .put("labelName", eventName) - .build()); - } - } - } - /** * To indicate a successful event (e.g. data received or data sent) that may be a helpful * metric, this method can be used to return a LoggingEventBuilder. The LoggingEventBuilder diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java new file mode 100644 index 000000000..3ef604072 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java @@ -0,0 +1,123 @@ +package org.opensearch.migrations.coreutils; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; +import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; +import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; +import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; + +public class SimpleMeteringClosure { + public final Meter meter; + public final Tracer tracer; + + public SimpleMeteringClosure(String scopeName) { + meter = GlobalOpenTelemetry.getMeter(scopeName); + tracer = GlobalOpenTelemetry.getTracer(scopeName); + } + + public static void initializeOpenTelemetry(String serviceName, String collectorEndpoint) { + var serviceResource = Resource.getDefault().toBuilder() + .put(ResourceAttributes.SERVICE_NAME, serviceName) + .build(); + + OpenTelemetrySdk openTelemetrySdk = + OpenTelemetrySdk.builder() + .setLoggerProvider( + SdkLoggerProvider.builder() + .setResource(serviceResource) + .addLogRecordProcessor( + BatchLogRecordProcessor.builder( + OtlpGrpcLogRecordExporter.builder() + .setEndpoint(collectorEndpoint) + .build()) + .build()) + .build()) + .setTracerProvider( + SdkTracerProvider.builder() + .setResource(serviceResource) + .addSpanProcessor( + BatchSpanProcessor.builder( + OtlpGrpcSpanExporter.builder() + .setEndpoint(collectorEndpoint) + .setTimeout(2, TimeUnit.SECONDS) + .build()) + .setScheduleDelay(100, TimeUnit.MILLISECONDS) + .build()) + .build()) + .setMeterProvider( + SdkMeterProvider.builder() + .setResource(serviceResource) + .registerMetricReader( + PeriodicMetricReader.builder( + OtlpGrpcMetricExporter.builder() + .setEndpoint(collectorEndpoint) + .build()) + .setInterval(Duration.ofMillis(1000)) + .build()) + .build()) + 
.buildAndRegisterGlobal(); + + // Add hook to close SDK, which flushes logs + Runtime.getRuntime().addShutdownHook(new Thread(openTelemetrySdk::close)); + //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); + } + + public void meterIncrementEvent(Context ctx, String eventName) { + meterIncrementEvent(ctx, eventName, 1); + } + + public void meterIncrementEvent(Context ctx, String eventName, long increment) { + if (ctx == null) { + return; + } + try (var namedOnlyForAutoClose = ctx.makeCurrent()) { + meter.counterBuilder(eventName) + .build().add(increment, Attributes.builder() + .put("labelName", eventName) + .build()); + } + } + + public void meterDeltaEvent(Context ctx, String eventName, long delta) { + if (ctx == null) { + return; + } + try (var namedOnlyForAutoClose = ctx.makeCurrent()) { + meter.upDownCounterBuilder(eventName) + .build().add(delta, Attributes.builder() + .put("labelName", eventName) + .build()); + } + } + + public void meterHistogramMillis(Context ctx, String eventName, Duration between) { + meterHistogram(ctx, eventName, (double) between.toMillis()); + } + + public void meterHistogram(Context ctx, String eventName, double value) { + if (ctx == null) { + return; + } + try (var namedOnlyForAutoClose = ctx.makeCurrent()) { + meter.histogramBuilder(eventName) + .build().record(value, Attributes.builder() + .put("labelName", eventName) + .build()); + } + } +} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java index 52e745230..41ee43295 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java @@ -4,6 +4,7 @@ import io.netty.handler.codec.http.HttpRequest; import io.netty.util.ReferenceCountUtil; import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.trace.Span; import io.opentelemetry.context.Context; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; @@ -26,17 +27,17 @@ public ConditionallyReliableLoggingHttpRequestHandler(Context incomingContext, protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { if (shouldBlockPredicate.test(httpRequest)) { - var blockingSpan = METERING_CLOSURE_OP.map(m->{ - m.meterIncrementEvent(telemetryContext, "blockingRequestUntilFlush"); - try (var namedOnlyForAutoClose = telemetryContext.makeCurrent()) { - return GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("blockedOnFlush").startSpan(); - }}).orElse(null); + Span blockingSpan; + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "blockingRequestUntilFlush"); + try (var namedOnlyForAutoClose = telemetryContext.makeCurrent()) { + blockingSpan = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("blockedOnFlush").startSpan(); + } trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { - METERING_CLOSURE_OP.ifPresent(m->{ - blockingSpan.end(); - m.meterIncrementEvent(telemetryContext, t != null ? 
"blockedFlushFailure" : "blockedFlushSuccess"); - }); + blockingSpan.end(); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, + t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); + if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users // could set as needed. Some users may be fine with just logging a failed offloading of a request @@ -52,9 +53,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob } }); } else { - METERING_CLOSURE_OP.ifPresent(m->{ - m.meterIncrementEvent(telemetryContext, "nonBlockingRequest"); - }); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "nonBlockingRequest"); super.channelFinishedReadingAnHttpMessage(ctx, msg, httpRequest); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index f47b7de8a..0d6086c4a 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -21,17 +21,16 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; import java.time.Instant; -import java.util.Optional; @Slf4j public class LoggingHttpRequestHandler extends ChannelInboundHandlerAdapter { public static final String TELEMETRY_SCOPE_NAME = "LoggingHttpInboundHandler"; - public static final Optional METERING_CLOSURE_OP = - Optional.of(new MetricsLogger.SimpleMeteringClosure(TELEMETRY_SCOPE_NAME)); + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); static class SimpleHttpRequestDecoder extends HttpRequestDecoder { @@ -85,15 +84,13 @@ public HttpRequest resetCurrentRequest() { public LoggingHttpRequestHandler(Context incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { this.createdTime = Instant.now(); - telemetryContext = METERING_CLOSURE_OP.map(m->{ - try (var scope = incomingContext.makeCurrent()) { - var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("frontendConnection").startSpan(); - var ctx = incomingContext.with(span); - m.meterIncrementEvent(ctx, "requestStarted"); - return ctx; - } - }).orElse(null); + try (var scope = incomingContext.makeCurrent()) { + var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("frontendConnection").startSpan(); + telemetryContext = incomingContext.with(span); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "requestStarted"); + } + this.trafficOffloader = trafficOffloader; requestDecoder = new SimpleHttpRequestDecoder(); // as a field for easier debugging httpDecoderChannel = new EmbeddedChannel( @@ -107,8 +104,8 @@ private HttpProcessedState parseHttpMessageParts(ByteBuf msg) { var state = getHandlerThatHoldsParsedHttpRequest().isDone ? 
HttpProcessedState.FULL_MESSAGE : HttpProcessedState.ONGOING; - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, - state == HttpProcessedState.FULL_MESSAGE ? "requestFullyParsed" : "requestPartiallyParsed")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, + state == HttpProcessedState.FULL_MESSAGE ? "requestFullyParsed" : "requestPartiallyParsed"); return state; } @@ -119,7 +116,7 @@ private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "unregistered")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "unregistered"); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -136,10 +133,9 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { - METERING_CLOSURE_OP.ifPresent(m->{ - m.meterIncrementEvent(telemetryContext, "handlerRemoved"); - Span.fromContext(telemetryContext).end(); - }); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "handlerRemoved"); + Span.fromContext(telemetryContext).end(); + trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -155,7 +151,8 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { super.channelRead(ctx, msg); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "requestReceived")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "requestReceived"); + metricsLogger.atSuccess(MetricsEvent.RECEIVED_FULL_HTTP_REQUEST) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()) .setAttribute(MetricsAttributeKey.HTTP_METHOD, httpRequest.method().toString()) @@ -169,10 +166,9 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { var bb = ((ByteBuf) msg).retainedDuplicate(); trafficOffloader.addReadEvent(timestamp, bb); - METERING_CLOSURE_OP.ifPresent(m-> { - m.meterIncrementEvent(telemetryContext, "read"); - m.meterIncrementEvent(telemetryContext, "readBytes", bb.readableBytes()); - }); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "read"); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "readBytes", bb.readableBytes()); + metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); @@ -196,7 +192,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "exception")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "exception"); httpDecoderChannel.close(); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java 
b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java index 9bb33fd22..ed8a8d55c 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java @@ -11,18 +11,17 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import java.net.SocketAddress; import java.time.Duration; import java.time.Instant; -import java.util.Optional; @Slf4j public class LoggingHttpResponseHandler extends ChannelOutboundHandlerAdapter { public static final String TELEMETRY_SCOPE_NAME = "LoggingHttpOutboundHandler"; - public static final Optional METERING_CLOSURE_OP = - Optional.of(new MetricsLogger.SimpleMeteringClosure(TELEMETRY_SCOPE_NAME)); + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpResponseHandler"); private final IChannelConnectionCaptureSerializer trafficOffloader; @@ -38,7 +37,7 @@ public LoggingHttpResponseHandler(Context incomingContext, @Override public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) throws Exception { trafficOffloader.addBindEvent(Instant.now(), localAddress); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "bind")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "bind"); super.bind(ctx, localAddress, promise); } @@ -46,15 +45,12 @@ public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelP public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); - METERING_CLOSURE_OP.ifPresent(m->{ - var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("backendConnection").startSpan(); - telemetryContext = telemetryContext.with(span); - connectTime = Instant.now(); - - m.meterIncrementEvent(telemetryContext, "connect"); - m.meterDeltaEvent(telemetryContext, "connections", 1); - }); + var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) + .spanBuilder("backendConnection").startSpan(); + telemetryContext = telemetryContext.with(span); + connectTime = Instant.now(); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "connect"); + METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", 1); super.connect(ctx, remoteAddress, localAddress, promise); } @@ -62,7 +58,7 @@ public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, Sock @Override public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { trafficOffloader.addDisconnectEvent(Instant.now()); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "disconnect")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "disconnect"); super.disconnect(ctx, promise); } @@ -70,19 +66,17 @@ public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) 
throws public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - METERING_CLOSURE_OP.ifPresent(m-> { - m.meterIncrementEvent(telemetryContext, "close"); - m.meterDeltaEvent(telemetryContext, "connections", -1); - m.meterHistogramMillis(telemetryContext, "connectionDuration", - Duration.between(connectTime, Instant.now())); - Span.fromContext(telemetryContext).end(); - }); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "close"); + METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", -1); + METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connectionDuration", + Duration.between(connectTime, Instant.now())); + Span.fromContext(telemetryContext).end(); } @Override public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { trafficOffloader.addDeregisterEvent(Instant.now()); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "deregister")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "deregister"); super.deregister(ctx, promise); } @@ -92,24 +86,23 @@ public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) trafficOffloader.addWriteEvent(Instant.now(), bb); metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); - METERING_CLOSURE_OP.ifPresent(m->{ - m.meterIncrementEvent(telemetryContext, "write"); - m.meterIncrementEvent(telemetryContext, "writeBytes", bb.readableBytes()); - }); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "write"); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "writeBytes", bb.readableBytes()); + super.write(ctx, msg, promise); } @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { flush(ctx); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "removed")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "removed"); super.handlerRemoved(ctx); } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - METERING_CLOSURE_OP.ifPresent(m->m.meterIncrementEvent(telemetryContext, "exception")); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "exception"); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 2d400d036..60de2bcff 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -17,7 +17,7 @@ import org.apache.kafka.common.config.SaslConfigs; import org.apache.logging.log4j.core.util.NullOutputStream; import org.opensearch.common.settings.Settings; -import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.FileConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; @@ -284,7 +284,7 @@ 
public static void main(String[] args) throws InterruptedException, IOException var backsideUri = convertStringToUri(params.backsideUriString); if (params.otelCollectorEndpoint != null) { - MetricsLogger.initializeOpenTelemetry("capture-proxy", params.otelCollectorEndpoint); + SimpleMeteringClosure.initializeOpenTelemetry("capture-proxy", params.otelCollectorEndpoint); } var sksOp = Optional.ofNullable(params.sslConfigFilePath) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index c1cc0d8a7..d92f34b0d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -67,7 +67,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.migrations.coreutils.MetricsLogger.initializeOpenTelemetry; +import static org.opensearch.migrations.coreutils.SimpleMeteringClosure.initializeOpenTelemetry; @Slf4j public class TrafficReplayer { From aba1aab9878256b5c00a0634f7c7532e1f394f5c Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 28 Nov 2023 18:53:15 -0500 Subject: [PATCH 08/94] WIP - Cleanup + get Jaeger to work by switching the endpoint. Also introduce some more typesafe wrappers for contexts. Lots more to come. Signed-off-by: Greg Schohn --- .../captureKafkaOffloader/build.gradle | 2 +- .../kafkaoffloader/KafkaCaptureFactory.java | 19 ++++-- TrafficCapture/coreUtilities/build.gradle | 8 ++- .../coreutils/SimpleMeteringClosure.java | 10 ++- .../src/main/docker/docker-compose.yml | 12 ++-- .../docker/otel-collector-config-demo.yaml | 33 ++++++--- ...allyReliableLoggingHttpRequestHandler.java | 22 ++++-- .../netty/LoggingHttpRequestHandler.java | 3 +- .../replay/AddCompressionEncodingTest.java | 2 +- .../trafficCaptureProxyServer/build.gradle | 8 ++- .../proxyserver/CaptureProxy.java | 2 +- TrafficCapture/trafficReplayer/build.gradle | 9 ++- .../replay/AccumulationCallbacks.java | 14 ++-- ...edTrafficToHttpTransactionAccumulator.java | 67 ++++++++++++------- .../replay/ClientConnectionPool.java | 19 ++++-- ...acketToTransformingHttpHandlerFactory.java | 3 +- .../migrations/replay/ReplayEngine.java | 11 +-- .../replay/RequestSenderOrchestrator.java | 35 +++++----- .../migrations/replay/TrafficReplayer.java | 32 ++++----- .../NettyPacketToHttpConsumer.java | 57 +++++++++------- .../http/HttpJsonTransformingConsumer.java | 22 +++--- ...dHttpRequestPreliminaryConvertHandler.java | 12 ++-- .../http/RequestPipelineOrchestrator.java | 9 +-- .../replay/tracing/ConnectionContext.java | 34 ++++++++++ .../replay/tracing/RequestContext.java | 26 +++++++ .../replay/tracing/WithAttributes.java | 9 +++ .../replay/HeaderTransformerTest.java | 7 +- .../replay/RequestSenderOrchestratorTest.java | 12 ++-- ...afficToHttpTransactionAccumulatorTest.java | 13 +++- .../replay/TrafficReplayerTest.java | 25 +++++-- .../NettyPacketToHttpConsumerTest.java | 14 ++-- .../HttpJsonTransformingConsumerTest.java | 8 +-- .../migrations/replay/TestRequestKey.java | 7 +- .../migrations/replay/TestUtils.java | 2 +- 34 files changed, 375 insertions(+), 193 deletions(-) create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java create mode 100644 
TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index 65e332890..ab99a0327 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -15,7 +15,7 @@ dependencies { implementation project(':coreUtilities') implementation group: 'com.google.protobuf', name:'protobuf-java', version:'3.22.2' api group:'io.netty', name:'netty-buffer', version: '4.1.100.Final' - implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' + implementation group: 'io.opentelemetry', name:'opentelemetry-api' implementation group: 'org.projectlombok', name:'lombok', version:'1.18.26' implementation group: 'org.apache.kafka', name:'kafka-clients', version:'3.6.0' implementation group: 'org.slf4j', name:'slf4j-api', version:'2.0.7' diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 4a94d93d9..aaa694e55 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -36,6 +36,7 @@ public class KafkaCaptureFactory implements IConnectionCaptureFactory RECORD_ID_KEY = ContextKey.named("recordId"); private static final ContextKey TOPIC_KEY = ContextKey.named("topic"); private static final ContextKey RECORD_SIZE_KEY = ContextKey.named("recordSize"); + private static final ContextKey START_FLUSH_KEY = ContextKey.named("startKafkaSend"); public static final String TELEMETRY_SCOPE_NAME = "KafkaCapture"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); @@ -101,7 +102,8 @@ public StreamManager(Context incomingTelemetryContext, String connectionId) { @Override public void close() throws IOException { - METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connection_lifetime", + log.atInfo().setMessage(()->"factory.close()").log(); + METERING_CLOSURE.meterHistogramMillis(telemetryContext, "offloader_stream_lifetime", Duration.between(startTime, Instant.now())); METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", -1); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_closed"); @@ -111,12 +113,9 @@ public void close() throws IOException { @Override public CodedOutputStreamWrapper createStream() { - Context newStreamCtx; METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_created"); - try (var scope = telemetryContext.makeCurrent()) { - newStreamCtx = Context.current() - .with(METERING_CLOSURE.tracer.spanBuilder("recordStream").startSpan()); - } + var newStreamCtx = telemetryContext + .with(METERING_CLOSURE.tracer.spanBuilder("recordStream").startSpan()); ByteBuffer bb = ByteBuffer.allocate(bufferSize); return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb, newStreamCtx); @@ -146,7 +145,9 @@ public CodedOutputStreamWrapper createStream() { try (var scope = telemetryContext 
.with(RECORD_ID_KEY, recordId) .with(TOPIC_KEY, topicNameForTraffic) - .with(RECORD_SIZE_KEY, kafkaRecord.value().length).makeCurrent()) { + .with(RECORD_SIZE_KEY, kafkaRecord.value().length) + .with(START_FLUSH_KEY, Instant.now()) + .makeCurrent()) { METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); flushContext = Context.current() .with(METERING_CLOSURE.tracer.spanBuilder("flushRecord").startSpan()); @@ -181,6 +182,10 @@ public CodedOutputStreamWrapper createStream() { private Callback handleProducerRecordSent(CompletableFuture cf, String recordId, Context flushContext) { return (metadata, exception) -> { + log.atInfo().setMessage(()->"kafka completed sending a record").log(); + METERING_CLOSURE.meterHistogramMicros(telemetryContext, + exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms", + Duration.between(flushContext.get(START_FLUSH_KEY), Instant.now())); METERING_CLOSURE.meterIncrementEvent(telemetryContext, exception==null ? "stream_flush_success" : "stream_flush_failure"); METERING_CLOSURE.meterIncrementEvent(telemetryContext, diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index da76d0f15..355e2ec38 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -40,6 +40,8 @@ repositories { } dependencies { + implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation project(':captureProtobufs') implementation "com.google.protobuf:protobuf-java:3.22.2" @@ -56,9 +58,9 @@ dependencies { implementation("org.apache.logging.log4j:log4j-slf4j2-impl:2.20.0") // OpenTelemetry core - implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' - implementation group: 'io.opentelemetry', name:'opentelemetry-exporter-otlp', version: '1.30.0' - implementation group: 'io.opentelemetry', name:'opentelemetry-sdk', version: '1.30.0' + implementation group: 'io.opentelemetry', name:'opentelemetry-api' + implementation group: 'io.opentelemetry', name:'opentelemetry-exporter-otlp' + implementation group: 'io.opentelemetry', name:'opentelemetry-sdk' implementation group: 'io.opentelemetry.instrumentation', name:'opentelemetry-log4j-appender-2.17', version: '1.30.0-alpha' implementation group: 'io.opentelemetry', name:'opentelemetry-semconv', version: '1.30.0-alpha' diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java index 3ef604072..1ff13ec47 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java @@ -106,15 +106,21 @@ public void meterDeltaEvent(Context ctx, String eventName, long delta) { } public void meterHistogramMillis(Context ctx, String eventName, Duration between) { - meterHistogram(ctx, eventName, (double) between.toMillis()); + meterHistogram(ctx, eventName, "ms", between.toMillis()); } - public void meterHistogram(Context ctx, String eventName, double value) { + public void meterHistogramMicros(Context ctx, String eventName, Duration between) { + meterHistogram(ctx, eventName, "us", between.toNanos()*1000); + } + + public void meterHistogram(Context ctx, String eventName, String units, long value) { if (ctx == null) { return; } try (var 
namedOnlyForAutoClose = ctx.makeCurrent()) { meter.histogramBuilder(eventName) + .ofLongs() + .setUnit(units) .build().record(value, Attributes.builder() .put("labelName", eventName) .build()); diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index ef31558c7..1085d203f 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -13,19 +13,19 @@ services: - "9090:9090" # Jaeger - jaeger-all-in-one: + jaeger: image: jaegertracing/all-in-one:latest networks: - migrations ports: - "16686:16686" - - "14268" - - "14250:14250" + - "4317" + - "4318" environment: - COLLECTOR_OTLP_ENABLED=true # Zipkin - zipkin-all-in-one: + zipkin: image: openzipkin/zipkin:latest networks: - migrations @@ -50,8 +50,8 @@ services: - "55679:55679" # zpages extension - "4317:4317" # otlp receiver depends_on: - - jaeger-all-in-one - - zipkin-all-in-one + - jaeger + - zipkin zookeeper: image: docker.io/bitnami/zookeeper:3.8 diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml index 92582f7e8..99ac784a8 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml @@ -13,22 +13,33 @@ exporters: loglevel: debug zipkin: - endpoint: "http://zipkin-all-in-one:9411/api/v2/spans" + endpoint: "http://zipkin:9411/api/v2/spans" format: proto - otlp/jaeger: - endpoint: jaeger-all-in-one:14250 + otlp/jaeger: # Jaeger supports OTLP directly. The default port for OTLP/gRPC is 4317 + endpoint: jaeger:4317 tls: insecure: true -# Alternatively, use jaeger_thrift_http with the settings below. In this case -# update the list of exporters on the traces pipeline. 
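The SimpleMeteringClosure change above records histograms as long values with an explicit unit string rather than bare doubles. A minimal, self-contained sketch of that OpenTelemetry metrics pattern follows; the scope name ("KafkaCapture") and metric name ("stream_flush_success_ms") are chosen only for illustration and are not prescribed by the patch.

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.LongHistogram;

import java.time.Duration;
import java.time.Instant;

public class HistogramUnitSketch {
    public static void main(String[] args) {
        Instant start = Instant.now();
        // ... timed work happens here ...

        // Build a long-valued histogram with an explicit unit, mirroring the shape of
        // meterHistogram(ctx, eventName, units, value).
        LongHistogram histogram = GlobalOpenTelemetry.get()
                .getMeter("KafkaCapture")
                .histogramBuilder("stream_flush_success_ms")
                .ofLongs()
                .setUnit("ms")
                .build();

        // Record the elapsed time in milliseconds, tagged with the same "labelName"
        // attribute style the closure uses.
        histogram.record(Duration.between(start, Instant.now()).toMillis(),
                Attributes.builder().put("labelName", "stream_flush_success_ms").build());
    }
}

If no SDK has been installed (for example in unit tests), GlobalOpenTelemetry falls back to a no-op implementation, so recording is a harmless no-op.
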
-# -# jaeger_thrift_http: -# url: http://jaeger-all-in-one:14268/api/traces processors: - batch: + # Processor to set the namespace based on the service name + attributes/nscapture: + actions: + - key: namespace + value: "capture" + action: insert + - key: service.name + value: "capture" + action: update + attributes/nsreplayer: + actions: + - key: namespace + value: "replay" + action: insert + - key: service.name + value: "replay" + action: update extensions: health_check: @@ -42,9 +53,9 @@ service: pipelines: traces: receivers: [otlp] - processors: [batch] + processors: [] exporters: [logging, zipkin, otlp/jaeger] metrics: receivers: [otlp] - processors: [batch] + processors: [attributes/nscapture, attributes/nsreplayer] exporters: [logging, prometheus] diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java index 41ee43295..13b6edf87 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java @@ -6,14 +6,18 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.trace.Span; import io.opentelemetry.context.Context; +import io.opentelemetry.context.ContextKey; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; +import java.time.Duration; +import java.time.Instant; import java.util.function.Predicate; @Slf4j public class ConditionallyReliableLoggingHttpRequestHandler extends LoggingHttpRequestHandler { + private ContextKey START_FLUSH_KEY = ContextKey.named("startTime"); private final Predicate shouldBlockPredicate; public ConditionallyReliableLoggingHttpRequestHandler(Context incomingContext, @@ -27,16 +31,22 @@ public ConditionallyReliableLoggingHttpRequestHandler(Context incomingContext, protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { if (shouldBlockPredicate.test(httpRequest)) { - Span blockingSpan; METERING_CLOSURE.meterIncrementEvent(telemetryContext, "blockingRequestUntilFlush"); - try (var namedOnlyForAutoClose = telemetryContext.makeCurrent()) { - blockingSpan = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("blockedOnFlush").startSpan(); + Context flushContext; + try (var namedOnlyForAutoClose = telemetryContext + .with(START_FLUSH_KEY, Instant.now()) + .makeCurrent()) { + flushContext = Context.current() + .with(METERING_CLOSURE.tracer.spanBuilder("blockedForFlush").startSpan()); } trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { - blockingSpan.end(); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, + log.atInfo().setMessage(()->"Done flushing").log(); + METERING_CLOSURE.meterIncrementEvent(flushContext, t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); + METERING_CLOSURE.meterHistogramMicros(flushContext, + t==null ? 
"blockedFlushFailure_micro" : "stream_flush_failure_micro", + Duration.between(flushContext.get(START_FLUSH_KEY), Instant.now())); + Span.fromContext(flushContext).end(); if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index 0d6086c4a..0d300eb57 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -85,8 +85,7 @@ public HttpRequest resetCurrentRequest() { public LoggingHttpRequestHandler(Context incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { this.createdTime = Instant.now(); try (var scope = incomingContext.makeCurrent()) { - var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("frontendConnection").startSpan(); + var span = METERING_CLOSURE.tracer.spanBuilder("frontendConnection").startSpan(); telemetryContext = incomingContext.with(span); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "requestStarted"); } diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java index 44da58363..33817c884 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java @@ -34,7 +34,7 @@ public void addingCompressionRequestHeaderCompressesPayload() throws ExecutionEx JsonJoltTransformer.newBuilder() .addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP) .build(), null, testPacketCapture, "TEST", - TestRequestKey.getTestConnectionRequestId(0)); + TestRequestKey.getTestConnectionRequestContext(0)); final var payloadPartSize = 511; final var numParts = 1025; diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index 05f00f8b8..81d2281dd 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -13,6 +13,8 @@ configurations { } dependencies { + implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation 'org.opensearch.plugin:opensearch-security:2.6.0.0' opensearchSecurityPlugin 'org.opensearch.plugin:opensearch-security:2.6.0.0' implementation files(zipTree("$configurations.opensearchSecurityPlugin.singleFile").matching { @@ -38,14 +40,14 @@ dependencies { implementation group: 'com.beust', name: 'jcommander', version: '1.82' implementation 'com.google.protobuf:protobuf-java:3.22.2' - implementation group: 'io.opentelemetry', name:'opentelemetry-api', version: '1.30.0' - implementation group: 'io.opentelemetry', name: 'opentelemetry-sdk', version: '1.32.0' 
+ implementation group: 'io.opentelemetry', name:'opentelemetry-api' + implementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' testImplementation project(':captureProtobufs') testImplementation testFixtures(project(path: ':testUtilities')) testImplementation testFixtures(project(path: ':captureOffloader')) - testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing', version: '1.32.0' + testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' } tasks.withType(Tar){ diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 60de2bcff..97b2a8293 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -284,7 +284,7 @@ public static void main(String[] args) throws InterruptedException, IOException var backsideUri = convertStringToUri(params.backsideUriString); if (params.otelCollectorEndpoint != null) { - SimpleMeteringClosure.initializeOpenTelemetry("capture-proxy", params.otelCollectorEndpoint); + SimpleMeteringClosure.initializeOpenTelemetry("capture", params.otelCollectorEndpoint); } var sksOp = Optional.ofNullable(params.sslConfigFilePath) diff --git a/TrafficCapture/trafficReplayer/build.gradle b/TrafficCapture/trafficReplayer/build.gradle index 7ca67b9d0..7cd49c99d 100644 --- a/TrafficCapture/trafficReplayer/build.gradle +++ b/TrafficCapture/trafficReplayer/build.gradle @@ -35,6 +35,7 @@ repositories { dependencies { //spotbugs 'com.github.spotbugs:spotbugs:4.7.3' def resilience4jVersion = "1.7.0"; + implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") implementation project(':captureProtobufs') implementation project(':coreUtilities') @@ -50,6 +51,8 @@ dependencies { implementation group: 'io.github.resilience4j', name: 'resilience4j-ratelimiter', version:"${resilience4jVersion}" implementation group: 'io.github.resilience4j', name: 'resilience4j-retry', version:"${resilience4jVersion}" implementation group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' + implementation group: 'io.opentelemetry', name:'opentelemetry-api' + implementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.6.0' implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' @@ -65,11 +68,14 @@ dependencies { testFixturesImplementation project(':replayerPlugins:jsonMessageTransformers:jsonMessageTransformerInterface') testFixturesImplementation testFixtures(project(path: ':testUtilities')) + testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") testFixturesImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testFixturesImplementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.15.0' testFixturesImplementation group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' testFixturesImplementation group: 'org.junit.jupiter', name:'junit-jupiter-api', version:'5.9.3' - + testFixturesImplementation group: 'io.opentelemetry', 
name:'opentelemetry-api' + testFixturesImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' + testFixturesImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation project(':captureOffloader') testImplementation testFixtures(project(path: ':captureOffloader')) @@ -78,6 +84,7 @@ dependencies { testImplementation project(':replayerPlugins:jsonMessageTransformers:jsonJoltMessageTransformerProvider') testImplementation project(':replayerPlugins:jsonMessageTransformers:openSearch23PlusTargetTransformerProvider') + testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation group: 'org.apache.httpcomponents.client5', name: 'httpclient5', version: '5.2.1' testImplementation group: 'org.junit.jupiter', name:'junit-jupiter-api', version:'5.x.x' testImplementation group: 'org.testcontainers', name: 'junit-jupiter', version: '1.19.0' diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index c581ffa0c..3f2504862 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -4,17 +4,21 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import java.time.Instant; import java.util.List; public interface AccumulationCallbacks { - void onRequestReceived(@NonNull UniqueReplayerRequestKey key, @NonNull HttpMessageAndTimestamp request); - void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @NonNull RequestResponsePacketPair rrpp); - void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, + void onRequestReceived(@NonNull UniqueReplayerRequestKey key, RequestContext ctx, + @NonNull HttpMessageAndTimestamp request); + void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, RequestContext ctx, + @NonNull RequestResponsePacketPair rrpp); + void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, ConnectionContext ctx, @NonNull List trafficStreamKeysBeingHeld); - void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, + void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); - void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk); + void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ConnectionContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index dc5aca221..29fc83126 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -1,11 +1,16 @@ package org.opensearch.migrations.replay; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.ContextKey; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -49,6 +54,8 @@ */ @Slf4j public class CapturedTrafficToHttpTransactionAccumulator { + public static final String TELEMETRY_SCOPE_NAME = "Accumulator"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); public static final Duration EXPIRATION_GRANULARITY = Duration.ofSeconds(1); private final ExpiringTrafficStreamMap liveStreams; @@ -129,9 +136,10 @@ public void accept(ITrafficStreamWithKey trafficStreamAndKey) { var tsk = trafficStreamAndKey.getKey(); var accum = liveStreams.getOrCreateWithoutExpiration(tsk, k->createInitialAccumulation(trafficStreamAndKey)); var trafficStream = trafficStreamAndKey.getStream(); + var ctx = new ConnectionContext(tsk); for (int i=0; i"Connection terminated: removing " + partitionId + ":" + connectionId + " from liveStreams map").log(); @@ -145,7 +153,7 @@ public void accept(ITrafficStreamWithKey trafficStreamAndKey) { assert accum.state == Accumulation.State.WAITING_FOR_NEXT_READ_CHUNK || accum.state == Accumulation.State.IGNORING_LAST_REQUEST || trafficStream.getSubStreamCount() == 0; - listener.onTrafficStreamIgnored(tsk); + listener.onTrafficStreamIgnored(tsk, ctx); } } @@ -175,15 +183,16 @@ private enum CONNECTION_STATUS { public CONNECTION_STATUS addObservationToAccumulation(@NonNull Accumulation accum, @NonNull ITrafficStreamKey trafficStreamKey, + ConnectionContext ctx, TrafficObservation observation) { log.atTrace().setMessage(()->"Adding observation: "+observation).log(); var timestamp = TrafficStreamUtils.instantFromProtoTimestamp(observation.getTs()); liveStreams.expireOldEntries(trafficStreamKey, accum, timestamp); - return handleCloseObservationThatAffectEveryState(accum, observation, trafficStreamKey, timestamp) + return handleCloseObservationThatAffectEveryState(accum, observation, trafficStreamKey, ctx, timestamp) .or(() -> handleObservationForSkipState(accum, observation)) - .or(() -> handleObservationForReadState(accum, observation, trafficStreamKey, timestamp)) - .or(() -> handleObservationForWriteState(accum, observation, trafficStreamKey, timestamp)) + .or(() -> handleObservationForReadState(accum, ctx, observation, trafficStreamKey, timestamp)) + .or(() -> handleObservationForWriteState(accum, ctx, observation, trafficStreamKey, timestamp)) .orElseGet(() -> { log.atWarn().setMessage(()->"unaccounted for observation type " + observation).log(); return CONNECTION_STATUS.ALIVE; @@ -216,17 +225,19 @@ private static List getTrafficStreamsHeldByAccum(Accumulation 
handleCloseObservationThatAffectEveryState(Accumulation accum, TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, + ConnectionContext ctx, Instant timestamp) { if (observation.hasClose()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); - rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); + rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum, ctx); closedConnectionCounter.incrementAndGet(); - listener.onConnectionClose(accum.trafficChannelKey, accum.getIndexOfCurrentRequest(), - RequestResponsePacketPair.ReconstructionStatus.COMPLETE, timestamp, getTrafficStreamsHeldByAccum(accum)); + listener.onConnectionClose(accum.trafficChannelKey, accum.getIndexOfCurrentRequest(), ctx, + RequestResponsePacketPair.ReconstructionStatus.COMPLETE, + timestamp, getTrafficStreamsHeldByAccum(accum)); return Optional.of(CONNECTION_STATUS.CLOSED); } else if (observation.hasConnectionException()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); - rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); + rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum, ctx); exceptionConnectionCounter.incrementAndGet(); accum.resetForNextRequest(); log.atDebug().setMessage(()->"Removing accumulated traffic pair due to " + @@ -238,6 +249,7 @@ private static List getTrafficStreamsHeldByAccum(Accumulation } private Optional handleObservationForReadState(@NonNull Accumulation accum, + ConnectionContext ctx, TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, Instant timestamp) { @@ -256,7 +268,7 @@ private Optional handleObservationForReadState(@NonNull Accum log.atTrace().setMessage(() -> "Added request data for accum[" + connectionId + "]=" + accum).log(); } else if (observation.hasEndOfMessageIndicator()) { assert accum.hasRrPair(); - handleEndOfRequest(accum); + handleEndOfRequest(accum, ctx); } else if (observation.hasReadSegment()) { log.atTrace().setMessage(()->"Adding request segment for accum[" + connectionId + "]=" + accum).log(); var rrPair = accum.getOrCreateTransactionPair(trafficStreamKey); @@ -274,7 +286,7 @@ private Optional handleObservationForReadState(@NonNull Accum return Optional.of(CONNECTION_STATUS.ALIVE); } - private Optional handleObservationForWriteState(Accumulation accum, + private Optional handleObservationForWriteState(Accumulation accum, ConnectionContext ctx, TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, Instant timestamp) { @@ -301,8 +313,8 @@ private Optional handleObservationForWriteState(Accumulation assert rrPair.responseData.hasInProgressSegment(); rrPair.responseData.finalizeRequestSegments(timestamp); } else if (observation.hasRead() || observation.hasReadSegment()) { - rotateAccumulationOnReadIfNecessary(connectionId, accum); - return handleObservationForReadState(accum, observation, trafficStreamKey, timestamp); + rotateAccumulationOnReadIfNecessary(connectionId, accum, ctx); + return handleObservationForReadState(accum, ctx, observation, trafficStreamKey, timestamp); } return Optional.of(CONNECTION_STATUS.ALIVE); @@ -311,19 +323,20 @@ private Optional handleObservationForWriteState(Accumulation // This function manages the transition case when an observation comes in that would terminate // any previous HTTP transaction for the connection. 
It returns true if there WAS a previous // transaction that has been reset and false otherwise - private boolean rotateAccumulationIfNecessary(String connectionId, Accumulation accum) { + private boolean rotateAccumulationIfNecessary(String connectionId, Accumulation accum, ConnectionContext ctx) { // If this was brand new, we don't need to care about triggering the callback. // We only need to worry about this if we have yet to send the RESPONSE. if (accum.state == Accumulation.State.ACCUMULATING_WRITES) { log.atDebug().setMessage(()->"Resetting accum[" + connectionId + "]=" + accum).log(); - handleEndOfResponse(accum, RequestResponsePacketPair.ReconstructionStatus.COMPLETE); + handleEndOfResponse(accum, ctx, RequestResponsePacketPair.ReconstructionStatus.COMPLETE); return true; } return false; } - private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumulation accum) { - if (rotateAccumulationIfNecessary(connectionId, accum)) { + private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumulation accum, + ConnectionContext ctx) { + if (rotateAccumulationIfNecessary(connectionId, accum, ctx)) { reusedKeepAliveCounter.incrementAndGet(); return true; } else { @@ -334,7 +347,7 @@ private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumu /** * @return True if something was sent to the callback, false if nothing had been accumulated */ - private boolean handleEndOfRequest(Accumulation accumulation) { + private boolean handleEndOfRequest(Accumulation accumulation, ConnectionContext ctx) { assert accumulation.state == Accumulation.State.ACCUMULATING_READS : "state == " + accumulation.state; var requestPacketBytes = accumulation.getRrPair().requestData; metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) @@ -342,12 +355,13 @@ private boolean handleEndOfRequest(Accumulation accumulation) { .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); assert (requestPacketBytes != null); assert (!requestPacketBytes.hasInProgressSegment()); - listener.onRequestReceived(accumulation.getRequestKey(), requestPacketBytes); + var requestContext = new RequestContext(ctx, accumulation.getRequestKey()); + listener.onRequestReceived(accumulation.getRequestKey(), requestContext, requestPacketBytes); accumulation.state = Accumulation.State.ACCUMULATING_WRITES; return true; } - private void handleEndOfResponse(Accumulation accumulation, + private void handleEndOfResponse(Accumulation accumulation, ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status) { assert accumulation.state == Accumulation.State.ACCUMULATING_WRITES; metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) @@ -355,20 +369,23 @@ private void handleEndOfResponse(Accumulation accumulation, .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); var rrPair = accumulation.getRrPair(); rrPair.completionStatus = status; - listener.onFullDataReceived(accumulation.getRequestKey(), rrPair); + var requestContext = new RequestContext(ctx, accumulation.getRequestKey()); + listener.onFullDataReceived(accumulation.getRequestKey(), requestContext, rrPair); accumulation.resetForNextRequest(); } public void close() { liveStreams.values().forEach(accum -> { requestsTerminatedUponAccumulatorCloseCounter.incrementAndGet(); - fireAccumulationsCallbacksAndClose(accum, 
RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY); + fireAccumulationsCallbacksAndClose(accum, + RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY); }); liveStreams.clear(); } private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, RequestResponsePacketPair.ReconstructionStatus status) { + ConnectionContext ctx = new ConnectionContext(accumulation.trafficChannelKey); try { switch (accumulation.state) { case ACCUMULATING_READS: @@ -383,12 +400,12 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, log.warn("Terminating a TrafficStream reconstruction w/out an accumulated value, " + "assuming an empty server interaction and NOT reproducing this to the target cluster."); if (accumulation.hasRrPair()) { - listener.onTrafficStreamsExpired(status, + listener.onTrafficStreamsExpired(status, ctx, Collections.unmodifiableList(accumulation.getRrPair().trafficStreamKeysBeingHeld)); } return; case ACCUMULATING_WRITES: - handleEndOfResponse(accumulation, status); + handleEndOfResponse(accumulation, ctx, status); break; case WAITING_FOR_NEXT_READ_CHUNK: case IGNORING_LAST_REQUEST: @@ -399,7 +416,7 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, } finally { if (accumulation.hasSignaledRequests()) { listener.onConnectionClose(accumulation.trafficChannelKey, accumulation.getIndexOfCurrentRequest(), - status, accumulation.getLastTimestamp(), getTrafficStreamsHeldByAccum(accumulation)); + ctx, status, accumulation.getLastTimestamp(), getTrafficStreamsHeldByAccum(accumulation)); } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index d647abb34..35ae238b8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -11,21 +11,29 @@ import io.netty.util.concurrent.DefaultPromise; import io.netty.util.concurrent.DefaultThreadFactory; import io.netty.util.concurrent.Future; +import io.opentelemetry.context.ContextKey; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; import java.net.URI; +import java.time.Instant; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @Slf4j public class ClientConnectionPool { + private static final ContextKey RECORD_ID_KEY = ContextKey.named("recordId"); + public static final String TELEMETRY_SCOPE_NAME = "ClientConnectionPool"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); public static final String TARGET_CONNECTION_POOL_NAME = "targetConnectionPool"; private final URI serverUri; @@ -61,17 +69,17 
@@ public ConnectionReplaySession load(final String s) { } private DiagnosticTrackableCompletableFuture - getResilientClientChannelProducer(EventLoop eventLoop, String diagnosticLabel) { + getResilientClientChannelProducer(EventLoop eventLoop, ConnectionContext connectionContext) { return new AdaptiveRateLimiter() .get(() -> { var clientConnectionChannelCreatedFuture = new StringTrackableCompletableFuture(new CompletableFuture<>(), () -> "waiting for createClientConnection to finish"); var channelFuture = NettyPacketToHttpConsumer.createClientConnection(eventLoop, - sslContext, serverUri, diagnosticLabel); + sslContext, serverUri, connectionContext); channelFuture.addListener(f -> { log.atInfo().setMessage(()-> - "New network connection result for " + diagnosticLabel + "=" + f.isSuccess()).log(); + "New network connection result for " + connectionContext + "=" + f.isSuccess()).log(); if (f.isSuccess()) { clientConnectionChannelCreatedFuture.future.complete(channelFuture); } else { @@ -135,7 +143,7 @@ public void closeConnection(String connId) { } public Future - submitEventualSessionGet(ISourceTrafficChannelKey channelKey, boolean ignoreIfNotPresent) { + submitEventualSessionGet(ISourceTrafficChannelKey channelKey, boolean ignoreIfNotPresent, ConnectionContext ctx) { ConnectionReplaySession channelFutureAndSchedule = getCachedSession(channelKey, ignoreIfNotPresent); if (channelFutureAndSchedule == null) { @@ -146,8 +154,7 @@ public void closeConnection(String connId) { return channelFutureAndSchedule.eventLoop.submit(() -> { if (channelFutureAndSchedule.getChannelFutureFuture() == null) { channelFutureAndSchedule.setChannelFutureFuture( - getResilientClientChannelProducer(channelFutureAndSchedule.eventLoop, - channelKey.getConnectionId())); + getResilientClientChannelProducer(channelFutureAndSchedule.eventLoop, ctx)); } return channelFutureAndSchedule; }); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index 29eff701b..5d03d7ddd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -7,6 +7,7 @@ import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -28,6 +29,6 @@ public PacketToTransformingHttpHandlerFactory(IJsonTransformer jsonTransformer, create(UniqueReplayerRequestKey requestKey) { log.trace("creating HttpJsonTransformingConsumer"); return new HttpJsonTransformingConsumer<>(jsonTransformer, authTransformerFactory, - new TransformedPacketReceiver(), requestKey.toString(), requestKey); + new TransformedPacketReceiver(), requestKey.toString(), new RequestContext(requestKey)); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index 542243d56..eaecdcca6 
100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -9,6 +9,8 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; @@ -134,7 +136,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, Instant originalStart, Instant originalEnd, + scheduleRequest(UniqueReplayerRequestKey requestKey, RequestContext ctx, Instant originalStart, Instant originalEnd, int numPackets, Stream packets) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "request"; @@ -147,16 +149,17 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestKey.getTrafficStreamKey().getConnectionId()) .setAttribute(MetricsAttributeKey.DELAY_FROM_ORIGINAL_TO_SCHEDULED_START, Duration.between(originalStart, start).toMillis()) .setAttribute(MetricsAttributeKey.SCHEDULED_SEND_TIME, start.toString()).emit(); - var sendResult = networkSendOrchestrator.scheduleRequest(requestKey, start, interval, packets); + var sendResult = networkSendOrchestrator.scheduleRequest(requestKey, ctx, start, interval, packets); return hookWorkFinishingUpdates(sendResult, originalStart, requestKey, label); } - public void closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, Instant timestamp) { + public void closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, + ConnectionContext ctx, Instant timestamp) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); logStartOfWork(new IndexedChannelInteraction(channelKey, channelInteractionNum), newCount, atTime, label); - var future = networkSendOrchestrator.scheduleClose(channelKey, channelInteractionNum, atTime); + var future = networkSendOrchestrator.scheduleClose(channelKey, channelInteractionNum, ctx, atTime); hookWorkFinishingUpdates(future, timestamp, channelKey, label); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index df6851652..94387dc3f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -9,6 +9,8 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; 
+import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -55,30 +57,32 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, Instant start, Duration interval, Stream packets) { + scheduleRequest(UniqueReplayerRequestKey requestKey, RequestContext ctx, + Instant start, Duration interval, Stream packets) { var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final aggregated response"); log.atDebug().setMessage(()->"Scheduling request for "+requestKey+" at start time "+start).log(); return asynchronouslyInvokeRunnableToSetupFuture(requestKey.getTrafficStreamKey(), - requestKey.getReplayerRequestIndex(), - false, finalTunneledResponse, - channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(requestKey, + requestKey.getReplayerRequestIndex(), ctx, false, finalTunneledResponse, + channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(requestKey, ctx, channelFutureAndRequestSchedule, finalTunneledResponse, start, interval, packets)); } public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChannelKey channelKey, - int channelInteractionNum, Instant timestamp) { + int channelInteractionNum, + ConnectionContext ctx, + Instant timestamp) { var channelInteraction = new IndexedChannelInteraction(channelKey, channelInteractionNum); var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm close has finished"); log.atDebug().setMessage(()->"Scheduling CLOSE for "+channelInteraction+" at time "+timestamp).log(); - asynchronouslyInvokeRunnableToSetupFuture(channelKey, channelInteractionNum, true, + asynchronouslyInvokeRunnableToSetupFuture(channelKey, channelInteractionNum, ctx,true, finalTunneledResponse, channelFutureAndRequestSchedule-> - scheduleOnConnectionReplaySession(channelKey, channelInteractionNum, channelFutureAndRequestSchedule, - finalTunneledResponse, timestamp, "close", () -> { + scheduleOnConnectionReplaySession(channelKey, channelInteractionNum, ctx, + channelFutureAndRequestSchedule, finalTunneledResponse, timestamp, "close", () -> { log.trace("Closing client connection " + channelInteraction); clientConnectionPool.closeConnection(channelKey.getConnectionId()); finalTunneledResponse.future.complete(null); @@ -89,11 +93,11 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne private DiagnosticTrackableCompletableFuture asynchronouslyInvokeRunnableToSetupFuture(ISourceTrafficChannelKey channelKey, int channelInteractionNumber, - boolean ignoreIfChannelNotPresent, + ConnectionContext ctx, boolean ignoreIfChannelNotPresent, DiagnosticTrackableCompletableFuture finalTunneledResponse, Consumer successFn) { var channelFutureAndScheduleFuture = - clientConnectionPool.submitEventualSessionGet(channelKey, ignoreIfChannelNotPresent); + clientConnectionPool.submitEventualSessionGet(channelKey, ignoreIfChannelNotPresent, ctx); channelFutureAndScheduleFuture.addListener(submitFuture->{ if (!submitFuture.isSuccess()) { log.atError().setCause(submitFuture.cause()) @@ -141,6 +145,7 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne } private void 
scheduleOnConnectionReplaySession(ISourceTrafficChannelKey channelKey, int channelInteractionIdx, + ConnectionContext ctx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture futureToBeCompletedByTask, Instant atTime, String activityNameForLogging, Runnable task) { @@ -186,17 +191,17 @@ private void scheduleOnConnectionReplaySession(ISourceTrafficChannelKey chan "... " + schedule).log(); } - private void scheduleSendOnConnectionReplaySession(UniqueReplayerRequestKey requestKey, + private void scheduleSendOnConnectionReplaySession(UniqueReplayerRequestKey requestKey, RequestContext ctx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture responseFuture, Instant start, Duration interval, Stream packets) { var eventLoop = channelFutureAndRequestSchedule.eventLoop; var packetReceiverRef = new AtomicReference(); Runnable packetSender = () -> sendNextPartAndContinue(() -> - getPacketReceiver(requestKey, channelFutureAndRequestSchedule.getInnerChannelFuture(), + getPacketReceiver(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); - scheduleOnConnectionReplaySession(requestKey.trafficStreamKey, requestKey.getSourceRequestIndex(), + scheduleOnConnectionReplaySession(requestKey.trafficStreamKey, requestKey.getSourceRequestIndex(), ctx, channelFutureAndRequestSchedule, responseFuture, start, "send", packetSender); } @@ -224,10 +229,10 @@ private long getDelayFromNowMs(Instant to) { } private static NettyPacketToHttpConsumer - getPacketReceiver(UniqueReplayerRequestKey requestKey, ChannelFuture channelFuture, + getPacketReceiver(RequestContext requestContext, ChannelFuture channelFuture, AtomicReference packetReceiver) { if (packetReceiver.get() == null) { - packetReceiver.set(new NettyPacketToHttpConsumer(channelFuture, requestKey.toString(), requestKey)); + packetReceiver.set(new NettyPacketToHttpConsumer(channelFuture, requestContext)); } return packetReceiver.get(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index d92f34b0d..47e3ea27f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -14,7 +14,10 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; @@ -67,8 +70,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.migrations.coreutils.SimpleMeteringClosure.initializeOpenTelemetry; - @Slf4j public class TrafficReplayer { @@ -384,7 +385,7 @@ public static void main(String[] args) return; } if (params.otelCollectorEndpoint != null) { - 
initializeOpenTelemetry("traffic-replayer", params.otelCollectorEndpoint); + SimpleMeteringClosure.initializeOpenTelemetry("replay", params.otelCollectorEndpoint); } try (var blockingTrafficSource = TrafficCaptureSourceFactory.createTrafficCaptureSource(params, @@ -601,11 +602,12 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { private ITrafficCaptureSource trafficCaptureSource; @Override - public void onRequestReceived(UniqueReplayerRequestKey requestKey, HttpMessageAndTimestamp request) { + public void onRequestReceived(UniqueReplayerRequestKey requestKey, RequestContext ctx, + HttpMessageAndTimestamp request) { replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); liveTrafficStreamLimiter.addWork(1); - var requestPushFuture = transformAndSendRequest(replayEngine, request, requestKey); + var requestPushFuture = transformAndSendRequest(replayEngine, request, requestKey, ctx); requestFutureMap.put(requestKey, requestPushFuture); liveRequests.put(requestKey, true); requestPushFuture.map(f->f.whenComplete((v,t)->{ @@ -618,7 +620,7 @@ public void onRequestReceived(UniqueReplayerRequestKey requestKey, HttpMessageAn } @Override - public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, + public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, RequestContext ctx, @NonNull RequestResponsePacketPair rrPair) { log.atInfo().setMessage(()->"Done receiving captured stream for " + requestKey + ":" + rrPair.requestData).log(); @@ -674,7 +676,7 @@ Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, Re @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - List trafficStreamKeysBeingHeld) { + ConnectionContext ctx, List trafficStreamKeysBeingHeld) { commitTrafficStreams(trafficStreamKeysBeingHeld, status); } @@ -696,15 +698,15 @@ private void commitTrafficStreams(List trafficStreamKeysBeing @Override public void onConnectionClose(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - RequestResponsePacketPair.ReconstructionStatus status, Instant timestamp, - List trafficStreamKeysBeingHeld) { + ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + Instant timestamp, List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); - replayEngine.closeConnection(channelKey, channelInteractionNum, timestamp); + replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); commitTrafficStreams(trafficStreamKeysBeingHeld, status); } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) { + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ConnectionContext ctx) { commitTrafficStreams(List.of(tsk), true); } @@ -858,15 +860,15 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture transformAndSendRequest(ReplayEngine replayEngine, HttpMessageAndTimestamp request, - UniqueReplayerRequestKey requestKey) { - return transformAndSendRequest(inputRequestTransformerFactory, replayEngine, + UniqueReplayerRequestKey requestKey, RequestContext ctx) { + return transformAndSendRequest(inputRequestTransformerFactory, replayEngine, ctx, request.getFirstPacketTimestamp(), request.getLastPacketTimestamp(), requestKey, request.packetBytes::stream); } public static DiagnosticTrackableCompletableFuture transformAndSendRequest(PacketToTransformingHttpHandlerFactory inputRequestTransformerFactory, - ReplayEngine replayEngine, + ReplayEngine 
replayEngine, RequestContext ctx, @NonNull Instant start, @NonNull Instant end, UniqueReplayerRequestKey requestKey, Supplier> packetsSupplier) @@ -880,7 +882,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture - replayEngine.scheduleRequest(requestKey, start, end, + replayEngine.scheduleRequest(requestKey, ctx, start, end, transformedResult.transformedOutput.size(), transformedResult.transformedOutput.streamRetained()) .map(future->future.thenApply(t-> diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 47ea9ad0a..321557815 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -18,14 +18,17 @@ import io.netty.handler.logging.LoggingHandler; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslHandler; +import io.opentelemetry.context.ContextKey; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.AggregatedRawResponse; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -36,6 +39,11 @@ @Slf4j public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer { + private static final ContextKey START_OF_REQUEST_KEY = ContextKey.named("startOfRequest"); + private static final ContextKey START_OF_WRITE_KEY = ContextKey.named("startOfWrite"); + public static final String TELEMETRY_SCOPE_NAME = "HttpSender"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + /** * Set this to of(LogLevel.ERROR) or whatever level you'd like to get logging between each handler. * Set this to Optional.empty() to disable intra-handler logging. 
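The START_OF_REQUEST_KEY and START_OF_WRITE_KEY fields added above use the standard io.opentelemetry.context.ContextKey idiom that the rest of the patch also relies on (for example START_FLUSH_KEY): a value is attached to an immutable Context via with(key, value) and read back later from the derived context. A small sketch of that round trip; the key name and timing logic are illustrative only.

import io.opentelemetry.context.Context;
import io.opentelemetry.context.ContextKey;

import java.time.Duration;
import java.time.Instant;

public class ContextKeySketch {
    // Keys are compared by identity, so they are typically declared once as constants;
    // the string passed to named() is only a debug label.
    private static final ContextKey<Instant> START_KEY = ContextKey.named("startOfWork");

    public static void main(String[] args) throws InterruptedException {
        // Attach the start time to a new immutable Context derived from the current one.
        Context ctx = Context.current().with(START_KEY, Instant.now());

        Thread.sleep(10); // stand-in for the work being timed

        // Read the value back from the same (or any further derived) context.
        Instant start = ctx.get(START_KEY);
        System.out.println("elapsed ms = " + Duration.between(start, Instant.now()).toMillis());
    }
}
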
@@ -52,18 +60,15 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - final String diagnosticLabel; - private UniqueReplayerRequestKey uniqueRequestKeyForMetricsLogging; + RequestContext tracingContext; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, - String diagnosticLabel, UniqueReplayerRequestKey uniqueRequestKeyForMetricsLogging) { - this(createClientConnection(eventLoopGroup, sslContext, serverUri, diagnosticLabel), - diagnosticLabel, uniqueRequestKeyForMetricsLogging); + RequestContext requestContext) { + this(createClientConnection(eventLoopGroup, sslContext, serverUri, requestContext), requestContext); } - public NettyPacketToHttpConsumer(ChannelFuture clientConnection, String diagnosticLabel, UniqueReplayerRequestKey uniqueRequestKeyForMetricsLogging) { - this.diagnosticLabel = "[" + diagnosticLabel + "] "; - this.uniqueRequestKeyForMetricsLogging = uniqueRequestKeyForMetricsLogging; + public NettyPacketToHttpConsumer(ChannelFuture clientConnection, RequestContext ctx) { + this.tracingContext = ctx; responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), @@ -87,7 +92,7 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, String diagnost } public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, SslContext sslContext, - URI serverUri, String diagnosticLabel) { + URI serverUri, ConnectionContext connectionContext) { String host = serverUri.getHost(); int port = serverUri.getPort(); log.atTrace().setMessage(()->"Active - setting up backend connection to " + host + ":" + port).log(); @@ -105,8 +110,8 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup if (connectFuture.isSuccess()) { var pipeline = connectFuture.channel().pipeline(); pipeline.removeFirst(); - log.atTrace() - .setMessage(()->diagnosticLabel + " Done setting up client channel & it was successful").log(); + log.atTrace().setMessage(()->connectionContext.getChannelKey() + + " Done setting up client channel & it was successful").log(); if (sslContext != null) { var sslEngine = sslContext.newEngine(connectFuture.channel().alloc()); sslEngine.setUseClientMode(true); @@ -126,7 +131,7 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup } else { // Close the connection if the connection attempt has failed. 
log.atWarn().setCause(connectFuture.cause()) - .setMessage(() -> diagnosticLabel + " CONNECT future was not successful, " + + .setMessage(() -> connectionContext.getChannelKey() + " CONNECT future was not successful, " + "so setting the channel future's result to an exception").log(); rval.setFailure(connectFuture.cause()); } @@ -190,8 +195,8 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa System.identityHashCode(packetData) + ")").log(); return writePacketAndUpdateFuture(packetData); } else { - log.atWarn().setMessage(()->diagnosticLabel + "outbound channel was not set up successfully, " + - "NOT writing bytes hash=" + System.identityHashCode(packetData)).log(); + log.atWarn().setMessage(()->tracingContext.getRequestKey() + "outbound channel was not set up " + + "successfully, NOT writing bytes hash=" + System.identityHashCode(packetData)).log(); channel.close(); return DiagnosticTrackableCompletableFuture.Factory.failedFuture(channelInitException, ()->""); } @@ -206,13 +211,14 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa final var completableFuture = new DiagnosticTrackableCompletableFuture(new CompletableFuture<>(), ()->"CompletableFuture that will wait for the netty future to fill in the completion value"); final int readableBytes = packetData.readableBytes(); + METERING_CLOSURE.meterIncrementEvent(tracingContext.context, "readBytes", packetData.readableBytes()); channel.writeAndFlush(packetData) .addListener((ChannelFutureListener) future -> { Throwable cause = null; try { if (!future.isSuccess()) { - log.atWarn().setMessage(()->diagnosticLabel + "closing outbound channel because WRITE " + - "future was not successful " + future.cause() + " hash=" + + log.atWarn().setMessage(()->tracingContext.getRequestKey() + "closing outbound channel " + + "because WRITE future was not successful " + future.cause() + " hash=" + System.identityHashCode(packetData) + " will be sending the exception to " + completableFuture).log(); future.channel().close(); // close the backside @@ -222,17 +228,17 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa cause = e; } if (cause == null) { - log.atTrace().setMessage(()->"Signaling previously returned CompletableFuture packet write was successful: " - + packetData + " hash=" + System.identityHashCode(packetData)).log(); + log.atTrace().setMessage(()->"Previously returned CompletableFuture packet write was " + + "successful: " + packetData + " hash=" + System.identityHashCode(packetData)).log(); completableFuture.future.complete(null); } else { - log.atInfo().setMessage(()->"Signaling previously returned CompletableFuture packet write had an exception : " - + packetData + " hash=" + System.identityHashCode(packetData)).log(); + log.atInfo().setMessage(()->"Previously returned CompletableFuture packet write had " + + " an exception :" + packetData + " hash=" + System.identityHashCode(packetData)).log(); metricsLogger.atError(MetricsEvent.WRITING_REQUEST_COMPONENT_FAILED, cause) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()) - .setAttribute(MetricsAttributeKey.REQUEST_ID, uniqueRequestKeyForMetricsLogging) + .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getRequestKey().toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, - uniqueRequestKeyForMetricsLogging.getTrafficStreamKey().getConnectionId()).emit(); + tracingContext.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); completableFuture.future.completeExceptionally(cause); 
channel.close(); } @@ -241,8 +247,9 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa ". Created future for writing data="+completableFuture).log(); metricsLogger.atSuccess(MetricsEvent.WROTE_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()) - .setAttribute(MetricsAttributeKey.REQUEST_ID, uniqueRequestKeyForMetricsLogging) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, uniqueRequestKeyForMetricsLogging.getTrafficStreamKey().getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getRequestKey()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, + tracingContext.getRequestKey().getTrafficStreamKey().getConnectionId()) .setAttribute(MetricsAttributeKey.SIZE_IN_BYTES, readableBytes).emit(); return completableFuture; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 8d17c8187..3019e0c0b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -11,7 +11,7 @@ import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; import org.opensearch.migrations.transform.IAuthTransformerFactory; @@ -50,7 +50,7 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume private final RequestPipelineOrchestrator pipelineOrchestrator; private final EmbeddedChannel channel; private static final MetricsLogger metricsLogger = new MetricsLogger("HttpJsonTransformingConsumer"); - private UniqueReplayerRequestKey requestKeyForMetricsLogging; + private RequestContext requestContext; /** * Roughly try to keep track of how big each data chunk was that came into the transformer. 
These values @@ -68,15 +68,15 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, IAuthTransformerFactory authTransformerFactory, IPacketFinalizingConsumer transformedPacketReceiver, String diagnosticLabel, - UniqueReplayerRequestKey requestKeyForMetricsLogging) { + RequestContext requestContext) { chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); channel = new EmbeddedChannel(); + this.requestContext = requestContext; pipelineOrchestrator = new RequestPipelineOrchestrator<>(chunkSizes, transformedPacketReceiver, - authTransformerFactory, diagnosticLabel, requestKeyForMetricsLogging); + authTransformerFactory, diagnosticLabel, requestContext); pipelineOrchestrator.addInitialHandlers(channel.pipeline(), transformer); - this.requestKeyForMetricsLogging = requestKeyForMetricsLogging; } private NettySendByteBufsToPacketHandlerHandler getOffloadingHandler() { @@ -142,15 +142,15 @@ public DiagnosticTrackableCompletableFuture"transformedHttpMessageValue"); } @@ -180,8 +180,8 @@ private static Throwable unwindPossibleCompletionException(Throwable t) { consumptionChainedFuture.thenCompose(v -> packetConsumer.finalizeRequest(), ()->"HttpJsonTransformingConsumer.redriveWithoutTransformation.compose()"); metricsLogger.atError(MetricsEvent.REQUEST_REDRIVEN_WITHOUT_TRANSFORMATION, reason) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestKeyForMetricsLogging) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestKeyForMetricsLogging.getTrafficStreamKey().getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getChannelKey().getConnectionId()) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); return finalizedFuture.map(f->f.thenApply(r->reason == null ? 
new TransformedOutputAndResult(r, HttpRequestTransformationStatus.SKIPPED, null) : diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 8debf6c1e..771076d52 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -10,7 +10,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datahandlers.PayloadAccessFaultingMap; import org.opensearch.migrations.replay.datahandlers.PayloadNotLoadedException; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IJsonTransformer; @@ -27,19 +27,19 @@ public class NettyDecodedHttpRequestPreliminaryConvertHandler extends Channel final IJsonTransformer transformer; final List> chunkSizes; final String diagnosticLabel; - private UniqueReplayerRequestKey requestKeyForMetricsLogging; + private RequestContext requestContext; static final MetricsLogger metricsLogger = new MetricsLogger("NettyDecodedHttpRequestPreliminaryConvertHandler"); public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, String diagnosticLabel, - UniqueReplayerRequestKey requestKeyForMetricsLogging) { + RequestContext requestContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; this.diagnosticLabel = "[" + diagnosticLabel + "] "; - this.requestKeyForMetricsLogging = requestKeyForMetricsLogging; + this.requestContext = requestContext; } @Override @@ -55,8 +55,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception .append(request.protocolVersion().text()) .toString()); metricsLogger.atSuccess(MetricsEvent.CAPTURED_REQUEST_PARSED_TO_HTTP) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestKeyForMetricsLogging) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestKeyForMetricsLogging.getTrafficStreamKey().getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getChannelKey().getConnectionId()) .setAttribute(MetricsAttributeKey.HTTP_METHOD, request.method()) .setAttribute(MetricsAttributeKey.HTTP_ENDPOINT, request.uri()).emit(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index e434efce9..5e285847d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -11,6 +11,7 @@ import 
lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -43,7 +44,7 @@ public class RequestPipelineOrchestrator { private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; final String diagnosticLabel; - private UniqueReplayerRequestKey requestKeyForMetricsLogging; + private RequestContext requestContext; @Getter final IAuthTransformerFactory authTransfomerFactory; @@ -51,13 +52,13 @@ public RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, IAuthTransformerFactory incomingAuthTransformerFactory, String diagnosticLabel, - UniqueReplayerRequestKey requestKeyForMetricsLogging) { + RequestContext requestContext) { this.chunkSizes = chunkSizes; this.packetReceiver = packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : IAuthTransformerFactory.NullAuthTransformerFactory.instance; this.diagnosticLabel = diagnosticLabel; - this.requestKeyForMetricsLogging = requestKeyForMetricsLogging; + this.requestContext = requestContext; } static void removeThisAndPreviousHandlers(ChannelPipeline pipeline, ChannelHandler targetHandler) { @@ -100,7 +101,7 @@ void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) // HttpRequestDecoder when the HttpRequestDecoder is removed from the pipeline BEFORE the // NettyDecodedHttpRequestHandler is removed. 
pipeline.addLast(new NettyDecodedHttpRequestPreliminaryConvertHandler(transformer, chunkSizes, this, - diagnosticLabel, requestKeyForMetricsLogging)); + diagnosticLabel, requestContext)); addLoggingHandler(pipeline, "B"); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java new file mode 100644 index 000000000..0312d5140 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java @@ -0,0 +1,34 @@ +package org.opensearch.migrations.replay.tracing; + +import io.netty.util.AttributeKey; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.ContextKey; +import lombok.NonNull; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; + +import java.util.stream.Stream; + +public class ConnectionContext implements WithAttributes { + protected static final ContextKey CHANNEL_KEY_CONTEXT_KEY = ContextKey.named("channelKey"); + protected static final AttributeKey CHANNEL_ATTR = AttributeKey.newInstance("channelKey"); + + public final Context context; + + public ConnectionContext(ISourceTrafficChannelKey tsk) { + this(Context.current().with(CHANNEL_KEY_CONTEXT_KEY, tsk)); + } + + public ConnectionContext(Context c) { + assert c.get(CHANNEL_KEY_CONTEXT_KEY) != null; + context = c; + } + + public @NonNull ISourceTrafficChannelKey getChannelKey() { + return context.get(CHANNEL_KEY_CONTEXT_KEY); + } + + @Override + public Stream getAttributeKeys() { + return Stream.of(CHANNEL_ATTR); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java new file mode 100644 index 000000000..bfc0f5407 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java @@ -0,0 +1,26 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.context.Context; +import io.opentelemetry.context.ContextKey; +import lombok.NonNull; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; + +public class RequestContext extends ConnectionContext { + private static final ContextKey UNIQUE_REQUEST_KEY = ContextKey.named("requestId"); + + public RequestContext(UniqueReplayerRequestKey requestKey) { + this(Context.current(), requestKey); + } + + public RequestContext(ConnectionContext ctx, UniqueReplayerRequestKey requestKey) { + this(ctx.context, requestKey); + } + + public RequestContext(Context context, UniqueReplayerRequestKey requestKey) { + super(context.with(UNIQUE_REQUEST_KEY, requestKey).with(CHANNEL_KEY_CONTEXT_KEY, requestKey.trafficStreamKey)); + } + + public @NonNull UniqueReplayerRequestKey getRequestKey() { + return context.get(UNIQUE_REQUEST_KEY); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java new file mode 100644 index 000000000..1988c7ae3 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java @@ -0,0 +1,9 @@ +package org.opensearch.migrations.replay.tracing; + +import io.netty.util.AttributeKey; + +import 
java.util.stream.Stream; + +public interface WithAttributes { + Stream getAttributeKeys(); +} diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java index c84987938..ea1bb6eb9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java @@ -15,7 +15,6 @@ import java.util.Random; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Function; import java.util.function.IntFunction; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -35,7 +34,7 @@ public void testTransformer() throws Exception { var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformer = new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME); var transformingHandler = new HttpJsonTransformingConsumer(transformer, null, testPacketCapture, - "TEST", TestRequestKey.getTestConnectionRequestId(0)); + "TEST", TestRequestKey.getTestConnectionRequestContext(0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + "HoSt: " + SOURCE_CLUSTER_NAME + "\r\n" + @@ -88,7 +87,7 @@ public void testMalformedPayloadIsPassedThrough() throws Exception { var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME), httpBasicAuthTransformer, testPacketCapture, "TEST", - TestRequestKey.getTestConnectionRequestId(0)); + TestRequestKey.getTestConnectionRequestContext(0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + @@ -114,7 +113,7 @@ public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exce var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME, null, "[{\"JsonTransformerForOpenSearch23PlusTargetTransformerProvider\":\"\"}]"), - null, testPacketCapture, "TEST", TestRequestKey.getTestConnectionRequestId(0)); + null, testPacketCapture, "TEST", TestRequestKey.getTestConnectionRequestContext(0)); Random r = new Random(2); var stringParts = IntStream.range(0, 1).mapToObj(i-> TestUtils.makeRandomString(r, 10)).map(o->(String)o) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index 657254c16..e7c4b71e6 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -5,7 +5,6 @@ import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.FullHttpResponse; import lombok.extern.slf4j.Slf4j; -import org.junit.Assume; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; @@ -40,20 +39,21 @@ public void testThatSchedulingWorks() throws Exception { Instant 
lastEndTime = baseTime; var scheduledItems = new ArrayList>(); for (int i = 0; i trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, + ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant when, List trafficStreamKeysBeingHeld) { } - @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) {} + @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, + ConnectionContext ctx) {} }); var tsList = trafficStreams.collect(Collectors.toList()); trafficStreams = tsList.stream(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 089880081..f52770fb9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -9,6 +9,8 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; @@ -154,30 +156,35 @@ public void testReader() throws Exception { new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey id, HttpMessageAndTimestamp request) { + public void onRequestReceived(UniqueReplayerRequestKey id, RequestContext ctx, + HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(bytesList)); } @Override - public void onFullDataReceived(UniqueReplayerRequestKey key, RequestResponsePacketPair fullPair) { + public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, + RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); } @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, + ConnectionContext ctx, List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, + ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant when, List trafficStreamKeysBeingHeld) { } - @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) {} + @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, + ConnectionContext ctx) {} }); var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); @@ -201,29 +208,33 @@ public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { "CapturedTrafficToHttpTransactionAccumulator that's being used in this unit test!", new AccumulationCallbacks() { 
@Override - public void onRequestReceived(UniqueReplayerRequestKey id, HttpMessageAndTimestamp request) { + public void onRequestReceived(UniqueReplayerRequestKey id, RequestContext ctx, + HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(bytesList)); } @Override - public void onFullDataReceived(UniqueReplayerRequestKey key, RequestResponsePacketPair fullPair) { + public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, + RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); } @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, + ConnectionContext ctx, List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - RequestResponsePacketPair.ReconstructionStatus status, + ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant when, List trafficStreamKeysBeingHeld) { } - @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) {} + @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, + ConnectionContext ctx) {} } ); byte[] serializedChunks; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index e60578199..5927410f0 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -22,6 +22,7 @@ import org.opensearch.migrations.replay.TransformationLoader; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.testutils.HttpFirstLine; import org.opensearch.migrations.testutils.PortFinder; @@ -128,9 +129,11 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except var testServer = testServers.get(useTls); var sslContext = !testServer.localhostEndpoint().getScheme().toLowerCase().equals("https") ? 
null : SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); - var nphc = new NettyPacketToHttpConsumer(new NioEventLoopGroup(4, new DefaultThreadFactory("test")), - testServer.localhostEndpoint(), sslContext, "unitTest"+i, - TestRequestKey.getTestConnectionRequestId(0)); + var nphc = new NettyPacketToHttpConsumer( + new NioEventLoopGroup(4, new DefaultThreadFactory("test")), + testServer.localhostEndpoint(), + sslContext, + TestRequestKey.getTestConnectionRequestContext(0)); nphc.consumeBytes((EXPECTED_REQUEST_STRING).getBytes(StandardCharsets.UTF_8)); var aggregatedResponse = nphc.finalizeRequest().get(); var responseBytePackets = aggregatedResponse.getCopyOfPackets(); @@ -162,9 +165,10 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) for (int i = 0; i < 2; ++i) { String connId = "TEST_" + j; var trafficStreamKey = new PojoTrafficStreamKey("testNodeId", connId, 0); + var requestKey = new UniqueReplayerRequestKey(trafficStreamKey, 0, i); + var ctx = new RequestContext(requestKey); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, - sendingFactory, Instant.now(), Instant.now(), - new UniqueReplayerRequestKey(trafficStreamKey, 0, i), + sendingFactory, ctx, Instant.now(), Instant.now(), requestKey, ()->Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); log.info("requestFinishFuture="+requestFinishFuture); var aggregatedResponse = requestFinishFuture.get(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java index ba6ab87f0..f4c7031ff 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java @@ -28,7 +28,7 @@ public void testPassThroughSinglePacketPost() throws Exception { new HttpJsonTransformingConsumer(new TransformationLoader() .getTransformerFactoryLoader(null), null, testPacketCapture, "TEST", - TestRequestKey.getTestConnectionRequestId(0)); + TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -49,7 +49,7 @@ public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws Ex new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), null, testPacketCapture, "TEST", - TestRequestKey.getTestConnectionRequestId(0)); + TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -75,7 +75,7 @@ public void testRemoveAuthHeadersWorks() throws Exception { new TransformationLoader().getTransformerFactoryLoader("test.domain"), RemovingAuthTransformerFactory.instance, testPacketCapture, "TEST", - TestRequestKey.getTestConnectionRequestId(0)); + TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/get_withAuthHeader.txt")) { @@ -114,7 +114,7 @@ 
private void walkMaps(Object o) { }); var transformingHandler = new HttpJsonTransformingConsumer(complexTransformer, null, - testPacketCapture, "TEST", TestRequestKey.getTestConnectionRequestId(0)); + testPacketCapture, "TEST", TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 5751cb4aa..2d94b11a2 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -2,14 +2,17 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.RequestContext; public class TestRequestKey { private TestRequestKey() {} - public static final UniqueReplayerRequestKey getTestConnectionRequestId(int replayerIdx) { - return new UniqueReplayerRequestKey( + public static final RequestContext getTestConnectionRequestContext(int replayerIdx) { + var rk = new UniqueReplayerRequestKey( new PojoTrafficStreamKey("testNodeId", "testConnectionId", 0), 0, replayerIdx); + return new RequestContext(new UniqueReplayerRequestKey(rk.trafficStreamKey, 1, 1)); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index 77322ffa6..59d4576f2 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -141,7 +141,7 @@ static void runPipelineAndValidate(IJsonTransformer transformer, var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), new AggregatedRawResponse(-1, Duration.ZERO, new ArrayList<>(), null)); var transformingHandler = new HttpJsonTransformingConsumer<>(transformer, authTransformer, testPacketCapture, - "TEST", TestRequestKey.getTestConnectionRequestId(0)); + "TEST", TestRequestKey.getTestConnectionRequestContext(0)); var contentLength = stringParts.stream().mapToInt(String::length).sum(); var headerString = "GET / HTTP/1.1\r\n" + From 900bc6dae0f6d2a5b9d9c2611133f1634bac93f7 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 29 Nov 2023 16:56:00 -0500 Subject: [PATCH 09/94] Start moving away from ThreadLocal and 'current contexts' and toward explicitly passing strongly typed context objects. 
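A minimal, hypothetical sketch of the change in direction described above (not code from any patch in this series): a strongly typed context object is passed explicitly through method signatures instead of being read back out of a ThreadLocal or an ambient "current context". All names below (ExplicitContextSketch, TracedContext, sendWithAmbientState, sendWithExplicitContext) are invented for the example.

    // Illustrative only: contrast ambient ThreadLocal state with an explicitly passed,
    // strongly typed context object. Not taken from the patches in this series.
    import java.time.Instant;

    public class ExplicitContextSketch {

        // Ambient-state style: callers set a ThreadLocal and callees read it implicitly,
        // so the dependency never shows up in any method signature.
        static final ThreadLocal<String> AMBIENT_CONNECTION_ID = new ThreadLocal<>();

        static void sendWithAmbientState(byte[] payload) {
            System.out.println("sending " + payload.length + " bytes for "
                    + AMBIENT_CONNECTION_ID.get());
        }

        // Explicit style: a small, strongly typed context travels through every call that
        // needs it, so the compiler (and the reader) can see exactly where it is used.
        record TracedContext(String connectionId, Instant startTime) {}

        static void sendWithExplicitContext(TracedContext ctx, byte[] payload) {
            System.out.println("sending " + payload.length + " bytes for " + ctx.connectionId());
        }

        public static void main(String[] args) {
            byte[] payload = "GET / HTTP/1.1\r\n\r\n".getBytes();

            AMBIENT_CONNECTION_ID.set("conn-1");
            sendWithAmbientState(payload);

            sendWithExplicitContext(new TracedContext("conn-1", Instant.now()), payload);
        }
    }

The explicit form costs wider method signatures, as the accompanying diffs show for callbacks like onRequestReceived and onConnectionClose, but it makes the tracing dependency visible at every call site and keeps per-request state off of thread-bound globals.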
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 130 ++++++++---------- .../tracing/KafkaRecordContext.java | 49 +++++++ .../KafkaCaptureFactoryTest.java | 12 +- TrafficCapture/captureOffloader/build.gradle | 7 +- .../FileConnectionCaptureFactory.java | 3 +- .../IConnectionCaptureFactory.java | 4 +- .../tracing/ConnectionContext.java | 33 +++++ .../InMemoryConnectionCaptureFactory.java | 6 +- TrafficCapture/coreUtilities/build.gradle | 6 +- .../coreutils/SimpleMeteringClosure.java | 66 +++++---- .../migrations/tracing/ContextWithSpan.java | 10 ++ .../migrations/tracing/EmptyContext.java | 25 ++++ .../tracing/IConnectionContext.java | 21 +++ .../tracing/IReplayerRequestContext.java | 16 +++ .../migrations/tracing/IRequestContext.java | 16 +++ .../migrations/tracing/IWithAttributes.java | 30 ++++ .../migrations/tracing/IWithStartTime.java | 7 + ...allyReliableLoggingHttpRequestHandler.java | 26 ++-- .../netty/LoggingHttpRequestHandler.java | 36 ++--- .../netty/LoggingHttpResponseHandler.java | 15 +- ...ReliableLoggingHttpRequestHandlerTest.java | 14 +- .../proxyserver/CaptureProxy.java | 2 +- .../netty/ProxyChannelInitializer.java | 12 +- .../migrations/replay/Accumulation.java | 21 ++- .../replay/AccumulationCallbacks.java | 8 +- ...edTrafficToHttpTransactionAccumulator.java | 66 ++++----- .../replay/ClientConnectionPool.java | 8 +- .../replay/PacketConsumerFactory.java | 3 +- ...acketToTransformingHttpHandlerFactory.java | 6 +- .../migrations/replay/ReplayEngine.java | 4 +- .../replay/RequestResponsePacketPair.java | 6 +- .../replay/RequestSenderOrchestrator.java | 15 +- .../migrations/replay/TrafficReplayer.java | 10 +- .../NettyPacketToHttpConsumer.java | 27 ++-- .../http/HttpJsonTransformingConsumer.java | 54 +++++--- ...dHttpRequestPreliminaryConvertHandler.java | 2 +- .../http/RequestPipelineOrchestrator.java | 1 - .../replay/tracing/ChannelKeyContext.java | 25 ++++ .../replay/tracing/ConnectionContext.java | 34 ----- .../replay/tracing/RequestContext.java | 54 ++++++-- .../replay/tracing/WithAttributes.java | 9 -- .../replay/RequestSenderOrchestratorTest.java | 4 +- .../replay/ResultsToLogsConsumerTest.java | 6 +- ...afficToHttpTransactionAccumulatorTest.java | 14 +- .../replay/TrafficReplayerTest.java | 14 +- .../NettyPacketToHttpConsumerTest.java | 4 +- .../migrations/replay/TestRequestKey.java | 5 +- 47 files changed, 591 insertions(+), 355 deletions(-) create mode 100644 TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java create mode 100644 TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java 
create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index aaa694e55..29c27f425 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader; import com.google.protobuf.CodedOutputStream; -import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.trace.Span; import io.opentelemetry.context.Context; import io.opentelemetry.context.ContextKey; @@ -15,12 +14,15 @@ import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.EmptyContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.KafkaRecordContext; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.nio.ByteBuffer; @@ -33,12 +35,8 @@ @Slf4j public class KafkaCaptureFactory implements IConnectionCaptureFactory { - private static final ContextKey RECORD_ID_KEY = ContextKey.named("recordId"); - private static final ContextKey TOPIC_KEY = ContextKey.named("topic"); - private static final ContextKey RECORD_SIZE_KEY = ContextKey.named("recordSize"); - private static final ContextKey START_FLUSH_KEY = ContextKey.named("startKafkaSend"); - public static final String TELEMETRY_SCOPE_NAME = "KafkaCapture"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("KafkaCapture"); + private static final MetricsLogger metricsLogger = new MetricsLogger("BacksideHandler"); @@ -66,23 +64,20 @@ public KafkaCaptureFactory(String nodeId, Producer producer, int } @Override - public IChannelConnectionCaptureSerializer createOffloader(String connectionId) { - Span offloaderSpan = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("offloader").startSpan(); - offloaderSpan.setAttribute("offloaderConnectionId", connectionId); - var context = Context.current().with(offloaderSpan); - METERING_CLOSURE.meterIncrementEvent(context, "offloader_created"); - METERING_CLOSURE.meterDeltaEvent(context, 
"offloaders_active", 1); + public IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, + String connectionId) { + METERING_CLOSURE.meterIncrementEvent(ctx, "offloader_created"); + METERING_CLOSURE.meterDeltaEvent(ctx, "offloaders_active", 1); return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, - new StreamManager(context, connectionId)); + new StreamManager(ctx, connectionId)); } @AllArgsConstructor static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { private final CodedOutputStream codedOutputStream; private final ByteBuffer byteBuffer; - final Context streamContext; + final ConnectionContext streamContext; @Override public @NonNull CodedOutputStream getOutputStream() { return codedOutputStream; @@ -90,32 +85,33 @@ static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { } class StreamManager extends OrderedStreamLifecyleManager { - Context telemetryContext; + ConnectionContext telemetryContext; String connectionId; Instant startTime; - public StreamManager(Context incomingTelemetryContext, String connectionId) { - this.telemetryContext = incomingTelemetryContext; + public StreamManager(ConnectionContext incomingTelemetryContext, String connectionId) { + this.telemetryContext = new ConnectionContext(incomingTelemetryContext, + METERING_CLOSURE.makeSpan(incomingTelemetryContext, "offloaderLifetime")); this.connectionId = connectionId; this.startTime = Instant.now(); } @Override public void close() throws IOException { - log.atInfo().setMessage(()->"factory.close()").log(); + log.atInfo().setMessage(() -> "factory.close()").log(); METERING_CLOSURE.meterHistogramMillis(telemetryContext, "offloader_stream_lifetime", - Duration.between(startTime, Instant.now())); + Duration.between(startTime, Instant.now())); METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", -1); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_closed"); - Span.fromContext(telemetryContext).end(); + telemetryContext.currentSpan.end(); } @Override public CodedOutputStreamWrapper createStream() { METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_created"); - var newStreamCtx = telemetryContext - .with(METERING_CLOSURE.tracer.spanBuilder("recordStream").startSpan()); + var newStreamCtx = new ConnectionContext(telemetryContext, + METERING_CLOSURE.makeSpan(telemetryContext, "recordStream")); ByteBuffer bb = ByteBuffer.allocate(bufferSize); return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb, newStreamCtx); @@ -140,18 +136,10 @@ public CodedOutputStreamWrapper createStream() { var cf = new CompletableFuture(); log.debug("Sending Kafka producer record: {} for topic: {}", recordId, topicNameForTraffic); - Context flushContext; - Span.fromContext(osh.streamContext).end(); - try (var scope = telemetryContext - .with(RECORD_ID_KEY, recordId) - .with(TOPIC_KEY, topicNameForTraffic) - .with(RECORD_SIZE_KEY, kafkaRecord.value().length) - .with(START_FLUSH_KEY, Instant.now()) - .makeCurrent()) { - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); - flushContext = Context.current() - .with(METERING_CLOSURE.tracer.spanBuilder("flushRecord").startSpan()); - } + var flushContext = new KafkaRecordContext(telemetryContext, + METERING_CLOSURE.makeSpan(telemetryContext, "flushRecord"), + topicNameForTraffic, recordId, kafkaRecord.value().length); + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); // Async request to Kafka cluster 
producer.send(kafkaRecord, handleProducerRecordSent(cf, recordId, flushContext)); @@ -168,40 +156,42 @@ public CodedOutputStreamWrapper createStream() { throw e; } } + } - /** - * The default KafkaProducer comes with built-in retry and error-handling logic that suits many cases. From the - * documentation here for retry: https://kafka.apache.org/35/javadoc/org/apache/kafka/clients/producer/KafkaProducer.html - * "If the request fails, the producer can automatically retry. The retries setting defaults to Integer.MAX_VALUE, - * and it's recommended to use delivery.timeout.ms to control retry behavior, instead of retries." - *

- * Apart from this the KafkaProducer has logic for deciding whether an error is transient and should be - * retried or not retried at all: https://kafka.apache.org/35/javadoc/org/apache/kafka/common/errors/RetriableException.html - * as well as basic retry backoff - */ - private Callback handleProducerRecordSent(CompletableFuture cf, String recordId, - Context flushContext) { - return (metadata, exception) -> { - log.atInfo().setMessage(()->"kafka completed sending a record").log(); - METERING_CLOSURE.meterHistogramMicros(telemetryContext, - exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms", - Duration.between(flushContext.get(START_FLUSH_KEY), Instant.now())); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, - exception==null ? "stream_flush_success" : "stream_flush_failure"); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, - exception==null ? "stream_flush_success_bytes" : "stream_flush_failure_bytes", - flushContext.get(RECORD_SIZE_KEY)); - Span.fromContext(flushContext).end(); - - if (exception != null) { - log.error("Error sending producer record: {}", recordId, exception); - cf.completeExceptionally(exception); - } else { - log.debug("Kafka producer record: {} has finished sending for topic: {} and partition {}", - recordId, metadata.topic(), metadata.partition()); - cf.complete(metadata); - } - }; - } + /** + * The default KafkaProducer comes with built-in retry and error-handling logic that suits many cases. From the + * documentation here for retry: https://kafka.apache.org/35/javadoc/org/apache/kafka/clients/producer/KafkaProducer.html + * "If the request fails, the producer can automatically retry. The retries setting defaults to Integer.MAX_VALUE, + * and it's recommended to use delivery.timeout.ms to control retry behavior, instead of retries." + *

+ * Apart from this the KafkaProducer has logic for deciding whether an error is transient and should be + * retried or not retried at all: https://kafka.apache.org/35/javadoc/org/apache/kafka/common/errors/RetriableException.html + * as well as basic retry backoff + */ + private Callback handleProducerRecordSent(CompletableFuture cf, String recordId, + KafkaRecordContext flushContext) { + // Keep this out of the inner class because it is more unsafe to include it within + // the inner class since the inner class has context that shouldn't be used. This keeps + // that field out of scope. + return (metadata, exception) -> { + log.atInfo().setMessage(()->"kafka completed sending a record").log(); + METERING_CLOSURE.meterHistogramMicros(flushContext, + exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms"); + METERING_CLOSURE.meterIncrementEvent(flushContext, + exception==null ? "stream_flush_success" : "stream_flush_failure"); + METERING_CLOSURE.meterIncrementEvent(flushContext, + exception==null ? "stream_flush_success_bytes" : "stream_flush_failure_bytes", + flushContext.getRecordSize()); + flushContext.currentSpan.end(); + + if (exception != null) { + log.error("Error sending producer record: {}", recordId, exception); + cf.completeExceptionally(exception); + } else { + log.debug("Kafka producer record: {} has finished sending for topic: {} and partition {}", + recordId, metadata.topic(), metadata.partition()); + cf.complete(metadata); + } + }; } } diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java new file mode 100644 index 000000000..006112753 --- /dev/null +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -0,0 +1,49 @@ +package org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.Span; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.opensearch.migrations.tracing.IConnectionContext; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithStartTime; + +import java.time.Instant; + +@AllArgsConstructor +public class KafkaRecordContext implements IWithAttributes, IWithStartTime { + static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); + static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); + static final AttributeKey RECORD_SIZE_ATTR = AttributeKey.longKey("recordSize"); + + @Getter + public final IConnectionContext enclosingScope; + @Getter + public final Span currentSpan; + @Getter + public final Instant startTime; + @Getter + public final String topic; + @Getter + public final String recordId; + @Getter + public final int recordSize; + + public KafkaRecordContext(IConnectionContext enclosingScope, Span currentSpan, + String topic, String recordId, int recordSize) { + this.enclosingScope = enclosingScope; + this.currentSpan = currentSpan; + this.topic = topic; + this.recordId = recordId; + this.recordSize = recordSize; + this.startTime = Instant.now(); + } + + @Override + public AttributesBuilder fillAttributes(AttributesBuilder builder) { + return builder.put(TOPIC_ATTR, 
getTopic()) + .put(RECORD_ID_ATTR, getRecordId()) + .put(RECORD_SIZE_ATTR, getRecordSize()); + } +} diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 732cc7df6..7099f6317 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader; import io.netty.buffer.Unpooled; +import io.opentelemetry.api.GlobalOpenTelemetry; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.ApiVersions; import org.apache.kafka.clients.producer.Callback; @@ -18,7 +19,9 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.migrations.tracing.EmptyContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -51,7 +54,7 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, MockProducer producer = new MockProducer<>(true, new StringSerializer(), new ByteArraySerializer()); KafkaCaptureFactory kafkaCaptureFactory = new KafkaCaptureFactory(TEST_NODE_ID_STRING, producer, maxAllowableMessageSize); - IChannelConnectionCaptureSerializer serializer = kafkaCaptureFactory.createOffloader(connectionId); + IChannelConnectionCaptureSerializer serializer = kafkaCaptureFactory.createOffloader(createCtx(), connectionId); StringBuilder sb = new StringBuilder(); for (int i = 0; i < 15000; i++) { @@ -73,6 +76,11 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, producer.close(); } + private static ConnectionContext createCtx() { + return new ConnectionContext("test", "test", + GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); + } + /** * This size calculation is based off the KafkaProducer client request size validation check done when Producer * records are sent. 
This validation appears to be consistent for several versions now, here is a reference to @@ -97,7 +105,7 @@ private int calculateRecordSize(ProducerRecord record, String re public void testLinearOffloadingIsSuccessful() throws IOException { KafkaCaptureFactory kafkaCaptureFactory = new KafkaCaptureFactory(TEST_NODE_ID_STRING, mockProducer, 1024*1024); - IChannelConnectionCaptureSerializer offloader = kafkaCaptureFactory.createOffloader(connectionId); + IChannelConnectionCaptureSerializer offloader = kafkaCaptureFactory.createOffloader(createCtx(), connectionId); List recordSentCallbacks = new ArrayList<>(3); when(mockProducer.send(any(), any())).thenAnswer(invocation -> { diff --git a/TrafficCapture/captureOffloader/build.gradle b/TrafficCapture/captureOffloader/build.gradle index c375d39a2..9c183b4a0 100644 --- a/TrafficCapture/captureOffloader/build.gradle +++ b/TrafficCapture/captureOffloader/build.gradle @@ -20,11 +20,14 @@ sourceSets { } } dependencies { + implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") api group: 'io.netty', name: 'netty-buffer', version: '4.1.100.Final' implementation project(':captureProtobufs') - implementation "com.google.protobuf:protobuf-java:3.22.2" - implementation 'org.projectlombok:lombok:1.18.26' + implementation project(':coreUtilities') + implementation group: 'io.opentelemetry', name:'opentelemetry-api' + implementation group: 'com.google.protobuf', name: 'protobuf-java', version: '3.22.2' + implementation group: 'org.projectlombok', name: 'lombok', version: '1.18.26' implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java index 751d39f63..b7ce9c029 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java @@ -3,6 +3,7 @@ import lombok.AllArgsConstructor; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.FileNotFoundException; import java.io.FileOutputStream; @@ -83,7 +84,7 @@ public CodedOutputStreamAndByteBufferWrapper createStream() { } @Override - public IChannelConnectionCaptureSerializer createOffloader(String connectionId) { + public IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, String connectionId) { return new StreamChannelConnectionCaptureSerializer(nodeId, connectionId, new StreamManager(connectionId)); } } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java index 9f5ec26c0..c5c5270e5 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java @@ -1,7 +1,9 @@ package org.opensearch.migrations.trafficcapture; +import 
org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; + import java.io.IOException; public interface IConnectionCaptureFactory { - IChannelConnectionCaptureSerializer createOffloader(String connectionId) throws IOException; + IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, String connectionId) throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java new file mode 100644 index 000000000..6df11b03d --- /dev/null +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -0,0 +1,33 @@ +package org.opensearch.migrations.trafficcapture.tracing; + +import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import org.opensearch.migrations.tracing.IConnectionContext; +import org.opensearch.migrations.tracing.IWithStartTime; + +import java.time.Instant; + +public class ConnectionContext implements IConnectionContext, IWithStartTime { + @Getter + public final String connectionId; + @Getter + public final String nodeId; + @Getter + public final Span currentSpan; + @Getter + private final Instant startTime; + + public ConnectionContext(ConnectionContext oldContext, Span currentSpan) { + this.connectionId = oldContext.getConnectionId(); + this.nodeId = oldContext.getNodeId(); + this.currentSpan = currentSpan; + this.startTime = Instant.now(); + } + + public ConnectionContext(String connectionId, String nodeId, Span currentSpan) { + this.connectionId = connectionId; + this.nodeId = nodeId; + this.currentSpan = currentSpan; + this.startTime = Instant.now(); + } +} diff --git a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java index b63ef52af..3c899e2eb 100644 --- a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java @@ -1,14 +1,12 @@ package org.opensearch.migrations.trafficcapture; -import com.google.protobuf.CodedOutputStream; import com.google.protobuf.InvalidProtocolBufferException; import lombok.AllArgsConstructor; import lombok.Getter; -import lombok.NonNull; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; -import java.nio.ByteBuffer; import java.util.Arrays; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentLinkedQueue; @@ -62,7 +60,7 @@ protected CompletableFuture kickoffCloseStream(CodedOutputStreamHolder out } @Override - public IChannelConnectionCaptureSerializer createOffloader(String connectionId) throws IOException { + public IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, String connectionId) throws IOException { // This array is only an indirection to work around Java's constraint that lambda values are final return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager()); } diff --git 
a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index 355e2ec38..fbe9a0307 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -53,9 +53,9 @@ dependencies { // Log4j implementation(platform("org.apache.logging.log4j:log4j-bom:2.21.1")) - implementation("org.apache.logging.log4j:log4j-api") - implementation("org.apache.logging.log4j:log4j-core") - implementation("org.apache.logging.log4j:log4j-slf4j2-impl:2.20.0") + implementation group: 'org.apache.logging.log4j', name: 'log4j-api' + implementation group: 'org.apache.logging.log4j', name :'log4j-core' + implementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' // OpenTelemetry core implementation group: 'io.opentelemetry', name:'opentelemetry-api' diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java index 1ff13ec47..f890264f2 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java @@ -1,10 +1,9 @@ package org.opensearch.migrations.coreutils; import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.context.Context; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; @@ -17,8 +16,11 @@ import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Duration; +import java.time.Instant; import java.util.concurrent.TimeUnit; public class SimpleMeteringClosure { @@ -77,53 +79,61 @@ public static void initializeOpenTelemetry(String serviceName, String collectorE //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); } - public void meterIncrementEvent(Context ctx, String eventName) { + public void meterIncrementEvent(IWithAttributes ctx, String eventName) { meterIncrementEvent(ctx, eventName, 1); } - public void meterIncrementEvent(Context ctx, String eventName, long increment) { + public void meterIncrementEvent(IWithAttributes ctx, String eventName, long increment) { if (ctx == null) { return; } - try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.counterBuilder(eventName) - .build().add(increment, Attributes.builder() - .put("labelName", eventName) - .build()); - } + meter.counterBuilder(eventName) + .build().add(increment, ctx.getPopulatedAttributesBuilder() + .put("labelName", eventName) + .build()); } - public void meterDeltaEvent(Context ctx, String eventName, long delta) { + public void meterDeltaEvent(IWithAttributes ctx, String eventName, long delta) { if (ctx == null) { return; } - try (var namedOnlyForAutoClose = ctx.makeCurrent()) { - meter.upDownCounterBuilder(eventName) - .build().add(delta, Attributes.builder() - .put("labelName", eventName) - .build()); 
-        }
+        meter.upDownCounterBuilder(eventName)
+                .build().add(delta, ctx.getPopulatedAttributesBuilder()
+                        .put("labelName", eventName)
+                        .build());
     }

-    public void meterHistogramMillis(Context ctx, String eventName, Duration between) {
+    public <T extends IWithAttributes & IWithStartTime> void meterHistogramMillis(T ctx, String eventName) {
+        meterHistogram(ctx, eventName, "ms", Duration.between(ctx.getStartTime(), Instant.now()).toMillis());
+    }
+
+    public <T extends IWithAttributes & IWithStartTime> void meterHistogramMicros(T ctx, String eventName) {
+        meterHistogram(ctx, eventName, "us", Duration.between(ctx.getStartTime(), Instant.now()).toNanos()/1000);
+    }
+
+    public void meterHistogramMillis(IWithAttributes ctx, String eventName, Duration between) {
         meterHistogram(ctx, eventName, "ms", between.toMillis());
     }

-    public void meterHistogramMicros(Context ctx, String eventName, Duration between) {
+    public void meterHistogramMicros(IWithAttributes ctx, String eventName, Duration between) {
         meterHistogram(ctx, eventName, "us", between.toNanos()*1000);
     }

-    public void meterHistogram(Context ctx, String eventName, String units, long value) {
+    public void meterHistogram(IWithAttributes ctx, String eventName, String units, long value) {
         if (ctx == null) { return; }
-        try (var namedOnlyForAutoClose = ctx.makeCurrent()) {
-            meter.histogramBuilder(eventName)
-                    .ofLongs()
-                    .setUnit(units)
-                    .build().record(value, Attributes.builder()
-                            .put("labelName", eventName)
-                            .build());
-        }
+        meter.histogramBuilder(eventName)
+                .ofLongs()
+                .setUnit(units)
+                .build().record(value, ctx.getPopulatedAttributesBuilder()
+                        .put("labelName", eventName)
+                        .build());
+    }
+
+    public Span makeSpan(IWithAttributes ctx, String spanName) {
+        var span = tracer.spanBuilder(spanName).startSpan();
+        span.setAllAttributes(ctx.getPopulatedAttributesBuilder().build());
+        return span;
     }
 }
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java
new file mode 100644
index 000000000..80d419891
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java
@@ -0,0 +1,10 @@
+package org.opensearch.migrations.tracing;
+
+import io.opentelemetry.api.trace.Span;
+import lombok.AllArgsConstructor;
+
+@AllArgsConstructor
+public class ContextWithSpan<T extends IWithAttributes> {
+    public final T context;
+    public final Span span;
+}
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java
new file mode 100644
index 000000000..d44a356c3
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java
@@ -0,0 +1,25 @@
+package org.opensearch.migrations.tracing;
+
+import io.opentelemetry.api.common.AttributesBuilder;
+import io.opentelemetry.api.trace.Span;
+
+public class EmptyContext implements IWithAttributes {
+    public static final EmptyContext singleton = new EmptyContext();
+
+    private EmptyContext() {}
+
+    @Override
+    public Span getCurrentSpan() {
+        throw new IllegalStateException("This class doesn't track spans");
+    }
+
+    @Override
+    public IWithAttributes getEnclosingScope() {
+        return null;
+    }
+
+    @Override
+    public AttributesBuilder fillAttributes(AttributesBuilder builder) {
+        return builder; // nothing more to do
+    }
+}
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java
new file mode 100644
index 000000000..075ba18f1
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java
@@ -0,0 +1,21 @@
+package org.opensearch.migrations.tracing;
+
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.AttributesBuilder;
+
+public interface IConnectionContext extends IWithAttributes<EmptyContext> {
+    static final AttributeKey<String> CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId");
+    static final AttributeKey<String> NODE_ID_ATTR = AttributeKey.stringKey("nodeId");
+
+    String getConnectionId();
+    String getNodeId();
+
+    @Override
+    default EmptyContext getEnclosingScope() { return EmptyContext.singleton; }
+
+    @Override
+    default AttributesBuilder fillAttributes(AttributesBuilder builder) {
+        return builder.put(CONNECTION_ID_ATTR, getConnectionId())
+                .put(NODE_ID_ATTR, getNodeId());
+    }
+}
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java
new file mode 100644
index 000000000..5bb61fce0
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java
@@ -0,0 +1,16 @@
+package org.opensearch.migrations.tracing;
+
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.AttributesBuilder;
+
+public interface IReplayerRequestContext extends IRequestContext {
+    static final AttributeKey<Long> REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex");
+
+    long replayerRequestIndex();
+
+    @Override
+    default AttributesBuilder fillAttributes(AttributesBuilder builder) {
+        return IRequestContext.super.fillAttributes(
+                builder.put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex()));
+    }
+}
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java
new file mode 100644
index 000000000..67744ff5f
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java
@@ -0,0 +1,16 @@
+package org.opensearch.migrations.tracing;
+
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.AttributesBuilder;
+
+public interface IRequestContext extends IConnectionContext {
+    static final AttributeKey<Long> SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex");
+
+    long sourceRequestIndex();
+
+    @Override
+    default AttributesBuilder fillAttributes(AttributesBuilder builder) {
+        return IConnectionContext.super.fillAttributes(
+                builder.put(SOURCE_REQUEST_INDEX_KEY, sourceRequestIndex()));
+    }
+}
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java
new file mode 100644
index 000000000..75d443d2d
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java
@@ -0,0 +1,30 @@
+package org.opensearch.migrations.tracing;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.common.AttributesBuilder;
+import io.opentelemetry.api.trace.Span;
+
+import java.util.ArrayList;
+
+public interface IWithAttributes<T extends IWithAttributes> {
+    T getEnclosingScope();
+
+    Span getCurrentSpan();
+
+    AttributesBuilder fillAttributes(AttributesBuilder builder);
+
+    default AttributesBuilder getPopulatedAttributesBuilder() {
+        var currentObj = this;
+        var stack = new ArrayList<IWithAttributes>();
+        var builder = Attributes.builder();
+        while (currentObj != null) {
+            stack.add(currentObj);
+            currentObj = currentObj.getEnclosingScope();
+        }
+        // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts
+        for (int i=stack.size()-1; i>=0; --i) {
+            builder = stack.get(i).fillAttributes(builder);
+        }
+        return builder;
+    }
+}
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java
new file mode 100644
index 000000000..b8e362ddb
--- /dev/null
+++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java
@@ -0,0 +1,7 @@
+package org.opensearch.migrations.tracing;
+
+import java.time.Instant;
+
+public interface IWithStartTime {
+    Instant getStartTime();
+}
diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java
index 13b6edf87..de3391fbe 100644
--- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java
+++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java
@@ -3,15 +3,12 @@
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.http.HttpRequest;
 import io.netty.util.ReferenceCountUtil;
-import io.opentelemetry.api.GlobalOpenTelemetry;
-import io.opentelemetry.api.trace.Span;
-import io.opentelemetry.context.Context;
 import io.opentelemetry.context.ContextKey;
 import lombok.Lombok;
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer;
+import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext;

-import java.time.Duration;
 import java.time.Instant;
 import java.util.function.Predicate;
@@ -20,7 +17,7 @@ public class ConditionallyReliableLoggingHttpRequestHandler extends LoggingHt
     private ContextKey<Instant> START_FLUSH_KEY = ContextKey.named("startTime");
     private final Predicate<HttpRequest> shouldBlockPredicate;

-    public ConditionallyReliableLoggingHttpRequestHandler(Context incomingContext,
+    public ConditionallyReliableLoggingHttpRequestHandler(ConnectionContext incomingContext,
                                                           IChannelConnectionCaptureSerializer trafficOffloader,
                                                           Predicate<HttpRequest> headerPredicateForWhenToBlock) {
         super(incomingContext, trafficOffloader);
@@ -31,22 +28,17 @@ public ConditionallyReliableLoggingHttpRequestHandler(Context incomingContext,
     protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg,
                                                        HttpRequest httpRequest) throws Exception {
         if (shouldBlockPredicate.test(httpRequest)) {
-            METERING_CLOSURE.meterIncrementEvent(telemetryContext, "blockingRequestUntilFlush");
-            Context flushContext;
-            try (var namedOnlyForAutoClose = telemetryContext
-                    .with(START_FLUSH_KEY, Instant.now())
-                    .makeCurrent()) {
-                flushContext = Context.current()
-
.with(METERING_CLOSURE.tracer.spanBuilder("blockedForFlush").startSpan()); - } + METERING_CLOSURE.meterIncrementEvent(connectionContext, "blockingRequestUntilFlush"); + var flushContext = new ConnectionContext(connectionContext, + METERING_CLOSURE.tracer.spanBuilder("blockedForFlush").startSpan()); + trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); METERING_CLOSURE.meterIncrementEvent(flushContext, t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); METERING_CLOSURE.meterHistogramMicros(flushContext, - t==null ? "blockedFlushFailure_micro" : "stream_flush_failure_micro", - Duration.between(flushContext.get(START_FLUSH_KEY), Instant.now())); - Span.fromContext(flushContext).end(); + t==null ? "blockedFlushFailure_micro" : "stream_flush_failure_micro"); + flushContext.currentSpan.end(); if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users @@ -63,7 +55,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob } }); } else { - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "nonBlockingRequest"); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "nonBlockingRequest"); super.channelFinishedReadingAnHttpMessage(ctx, msg, httpRequest); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index 0d300eb57..7f3191dc0 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -13,9 +13,6 @@ import io.netty.handler.codec.http.HttpRequestDecoder; import io.netty.handler.codec.http.HttpVersion; import io.netty.handler.codec.http.LastHttpContent; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.context.Context; import lombok.Getter; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; @@ -24,6 +21,7 @@ import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.time.Instant; @@ -78,17 +76,13 @@ public HttpRequest resetCurrentRequest() { protected final EmbeddedChannel httpDecoderChannel; protected final SimpleHttpRequestDecoder requestDecoder; - protected final Context telemetryContext; - private final Instant createdTime; + protected final ConnectionContext connectionContext; - - public LoggingHttpRequestHandler(Context incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { - this.createdTime = Instant.now(); - try (var scope = incomingContext.makeCurrent()) { - var span = METERING_CLOSURE.tracer.spanBuilder("frontendConnection").startSpan(); - telemetryContext = incomingContext.with(span); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "requestStarted"); - } + public LoggingHttpRequestHandler(ConnectionContext incomingContext, + IChannelConnectionCaptureSerializer trafficOffloader) { + this.connectionContext = incomingContext; + var span = 
METERING_CLOSURE.makeSpan(incomingContext, "frontendConnection"); + METERING_CLOSURE.meterIncrementEvent(incomingContext, "requestStarted"); this.trafficOffloader = trafficOffloader; requestDecoder = new SimpleHttpRequestDecoder(); // as a field for easier debugging @@ -103,7 +97,7 @@ private HttpProcessedState parseHttpMessageParts(ByteBuf msg) { var state = getHandlerThatHoldsParsedHttpRequest().isDone ? HttpProcessedState.FULL_MESSAGE : HttpProcessedState.ONGOING; - METERING_CLOSURE.meterIncrementEvent(telemetryContext, + METERING_CLOSURE.meterIncrementEvent(connectionContext, state == HttpProcessedState.FULL_MESSAGE ? "requestFullyParsed" : "requestPartiallyParsed"); return state; } @@ -115,7 +109,7 @@ private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "unregistered"); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "unregistered"); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -132,8 +126,8 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "handlerRemoved"); - Span.fromContext(telemetryContext).end(); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "handlerRemoved"); + connectionContext.getCurrentSpan().end(); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { @@ -150,7 +144,7 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { super.channelRead(ctx, msg); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "requestReceived"); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "requestReceived"); metricsLogger.atSuccess(MetricsEvent.RECEIVED_FULL_HTTP_REQUEST) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()) @@ -165,8 +159,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { var bb = ((ByteBuf) msg).retainedDuplicate(); trafficOffloader.addReadEvent(timestamp, bb); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "read"); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "readBytes", bb.readableBytes()); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "read"); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "readBytes", bb.readableBytes()); metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); @@ -191,7 +185,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "exception"); + METERING_CLOSURE.meterIncrementEvent(connectionContext, "exception"); httpDecoderChannel.close(); super.exceptionCaught(ctx, cause); } diff --git 
a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java index ed8a8d55c..94e93d021 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java @@ -4,15 +4,13 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelOutboundHandlerAdapter; import io.netty.channel.ChannelPromise; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.context.Context; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.net.SocketAddress; import java.time.Duration; @@ -25,10 +23,10 @@ public class LoggingHttpResponseHandler extends ChannelOutboundHandlerAdapter private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpResponseHandler"); private final IChannelConnectionCaptureSerializer trafficOffloader; - private Context telemetryContext; + private ConnectionContext telemetryContext; private Instant connectTime; - public LoggingHttpResponseHandler(Context incomingContext, + public LoggingHttpResponseHandler(ConnectionContext incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { this.trafficOffloader = trafficOffloader; this.telemetryContext = incomingContext; @@ -45,9 +43,8 @@ public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelP public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); - var span = GlobalOpenTelemetry.get().getTracer(TELEMETRY_SCOPE_NAME) - .spanBuilder("backendConnection").startSpan(); - telemetryContext = telemetryContext.with(span); + var span = METERING_CLOSURE.makeSpan(telemetryContext,"backendConnection"); + telemetryContext = new ConnectionContext(telemetryContext, span); connectTime = Instant.now(); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "connect"); METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", 1); @@ -70,7 +67,7 @@ public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exce METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", -1); METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connectionDuration", Duration.between(connectTime, Instant.now())); - Span.fromContext(telemetryContext).end(); + telemetryContext.currentSpan.end(); } @Override diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index 76a2cc762..86cd36ea5 100644 --- 
a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -6,15 +6,11 @@ import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.context.Context; -import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; -import io.opentelemetry.sdk.trace.data.SpanData; import lombok.AllArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; @@ -24,10 +20,10 @@ import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.ByteArrayInputStream; -import java.io.FileOutputStream; import java.io.IOException; import java.io.SequenceInputStream; import java.nio.ByteBuffer; @@ -87,12 +83,10 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumertrue)); // true: block every request + new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, x->true)); // true: block every request channelWriter.accept(channel); // we wrote the correct data to the downstream handler/channel diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 97b2a8293..c4736c75f 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -174,7 +174,7 @@ private static Settings getSettings(@NonNull String configFile) { private static IConnectionCaptureFactory getNullConnectionCaptureFactory() { System.err.println("No trace log directory specified. 
Logging to /dev/null"); - return connectionId -> new StreamChannelConnectionCaptureSerializer<>(null, connectionId, + return (ctx, connectionId) -> new StreamChannelConnectionCaptureSerializer<>(null, connectionId, new StreamLifecycleManager<>() { @Override public void close() {} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 2ec2eb3e3..2e48c9765 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -5,18 +5,19 @@ import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.ssl.SslHandler; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.ContextKey; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.EmptyContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpRequestHandler; import org.opensearch.migrations.trafficcapture.netty.LoggingHttpResponseHandler; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import javax.net.ssl.SSLEngine; import java.io.IOException; import java.util.function.Supplier; public class ProxyChannelInitializer extends ChannelInitializer { - static final ContextKey CONNECTION_ID_KEY = ContextKey.named("connectionId"); + static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("FrontendConnection"); private final IConnectionCaptureFactory connectionCaptureFactory; private final Supplier sslEngineProvider; @@ -45,8 +46,9 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - var offloader = connectionCaptureFactory.createOffloader(connectionId); - var ctx = Context.current().with(CONNECTION_ID_KEY, connectionId); + var ctx = new ConnectionContext(connectionId, "", + METERING_CLOSURE.makeSpan(EmptyContext.singleton, "connectionLifetime")); + var offloader = connectionCaptureFactory.createOffloader(ctx, connectionId); ch.pipeline().addLast(new LoggingHttpResponseHandler<>(ctx, offloader)); ch.pipeline().addLast(new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, this::shouldGuaranteeMessageOffloading)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index b3f65daf2..fa474ac4e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -1,15 +1,21 @@ package org.opensearch.migrations.replay; import lombok.NonNull; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; +import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.tracing.EmptyContext; import java.time.Instant; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; public class Accumulation { + public static final String TELEMETRY_SCOPE_NAME = "Accumulator"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); enum State { // Ignore all initial READs, the first EOM & the following WRITEs (if they or EOMs exist) @@ -22,12 +28,14 @@ enum State { } public final ISourceTrafficChannelKey trafficChannelKey; + public final ChannelKeyContext channelContext; private RequestResponsePacketPair rrPair; AtomicLong newestPacketTimestampInMillis; State state; AtomicInteger numberOfResets; final int startingSourceRequestIndex; + public Accumulation(@NonNull ITrafficStreamKey trafficChannelKey, int startingSourceRequestIndex) { this(trafficChannelKey, startingSourceRequestIndex, false); } @@ -40,19 +48,26 @@ public Accumulation(@NonNull ITrafficStreamKey trafficChannelKey, this.startingSourceRequestIndex = startingSourceRequestIndex; this.state = dropObservationsLeftoverFromPrevious ? State.IGNORING_LAST_REQUEST : State.WAITING_FOR_NEXT_READ_CHUNK; + channelContext = new ChannelKeyContext(trafficChannelKey, + METERING_CLOSURE.makeSpan(EmptyContext.singleton, "accumulatingChannel")); } public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey forTrafficStreamKey) { if (rrPair != null) { return rrPair; } - rrPair = new RequestResponsePacketPair(forTrafficStreamKey); + this.rrPair = new RequestResponsePacketPair(forTrafficStreamKey, + new RequestContext(getRequestKey(forTrafficStreamKey), + METERING_CLOSURE.makeSpan(channelContext, "accumulatingRequest"))); return rrPair; } public UniqueReplayerRequestKey getRequestKey() { - return new UniqueReplayerRequestKey(getRrPair().getBeginningTrafficStreamKey(), - startingSourceRequestIndex, getIndexOfCurrentRequest()); + return getRequestKey(getRrPair().getBeginningTrafficStreamKey()); + } + + private UniqueReplayerRequestKey getRequestKey(@NonNull ITrafficStreamKey tsk) { + return new UniqueReplayerRequestKey(tsk, startingSourceRequestIndex, getIndexOfCurrentRequest()); } public boolean hasSignaledRequests() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index 3f2504862..28d988f3d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -4,7 +4,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import java.time.Instant; @@ -15,10 +15,10 @@ void onRequestReceived(@NonNull UniqueReplayerRequestKey key, RequestContext ctx @NonNull HttpMessageAndTimestamp 
request); void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, RequestContext ctx, @NonNull RequestResponsePacketPair rrpp); - void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, ConnectionContext ctx, + void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, ChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); - void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, ConnectionContext ctx, + void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); - void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ConnectionContext ctx); + void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ChannelKeyContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 29fc83126..35cde47fd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -1,16 +1,11 @@ package org.opensearch.migrations.replay; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.ContextKey; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -54,8 +49,6 @@ */ @Slf4j public class CapturedTrafficToHttpTransactionAccumulator { - public static final String TELEMETRY_SCOPE_NAME = "Accumulator"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); public static final Duration EXPIRATION_GRANULARITY = Duration.ofSeconds(1); private final ExpiringTrafficStreamMap liveStreams; @@ -136,10 +129,9 @@ public void accept(ITrafficStreamWithKey trafficStreamAndKey) { var tsk = trafficStreamAndKey.getKey(); var accum = liveStreams.getOrCreateWithoutExpiration(tsk, k->createInitialAccumulation(trafficStreamAndKey)); var trafficStream = trafficStreamAndKey.getStream(); - var ctx = new ConnectionContext(tsk); for (int i=0; i"Connection terminated: removing " + partitionId + ":" + connectionId + " from liveStreams map").log(); @@ -153,7 +145,7 @@ public void accept(ITrafficStreamWithKey trafficStreamAndKey) { assert accum.state == Accumulation.State.WAITING_FOR_NEXT_READ_CHUNK || accum.state == Accumulation.State.IGNORING_LAST_REQUEST || trafficStream.getSubStreamCount() == 0; - listener.onTrafficStreamIgnored(tsk, ctx); + 
listener.onTrafficStreamIgnored(tsk, accum.channelContext); } } @@ -183,16 +175,15 @@ private enum CONNECTION_STATUS { public CONNECTION_STATUS addObservationToAccumulation(@NonNull Accumulation accum, @NonNull ITrafficStreamKey trafficStreamKey, - ConnectionContext ctx, TrafficObservation observation) { log.atTrace().setMessage(()->"Adding observation: "+observation).log(); var timestamp = TrafficStreamUtils.instantFromProtoTimestamp(observation.getTs()); liveStreams.expireOldEntries(trafficStreamKey, accum, timestamp); - return handleCloseObservationThatAffectEveryState(accum, observation, trafficStreamKey, ctx, timestamp) + return handleCloseObservationThatAffectEveryState(accum, observation, trafficStreamKey, timestamp) .or(() -> handleObservationForSkipState(accum, observation)) - .or(() -> handleObservationForReadState(accum, ctx, observation, trafficStreamKey, timestamp)) - .or(() -> handleObservationForWriteState(accum, ctx, observation, trafficStreamKey, timestamp)) + .or(() -> handleObservationForReadState(accum, observation, trafficStreamKey, timestamp)) + .or(() -> handleObservationForWriteState(accum, observation, trafficStreamKey, timestamp)) .orElseGet(() -> { log.atWarn().setMessage(()->"unaccounted for observation type " + observation).log(); return CONNECTION_STATUS.ALIVE; @@ -225,19 +216,18 @@ private static List getTrafficStreamsHeldByAccum(Accumulation handleCloseObservationThatAffectEveryState(Accumulation accum, TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, - ConnectionContext ctx, Instant timestamp) { if (observation.hasClose()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); - rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum, ctx); + rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); closedConnectionCounter.incrementAndGet(); - listener.onConnectionClose(accum.trafficChannelKey, accum.getIndexOfCurrentRequest(), ctx, + listener.onConnectionClose(accum.trafficChannelKey, accum.getIndexOfCurrentRequest(), accum.channelContext, RequestResponsePacketPair.ReconstructionStatus.COMPLETE, timestamp, getTrafficStreamsHeldByAccum(accum)); return Optional.of(CONNECTION_STATUS.CLOSED); } else if (observation.hasConnectionException()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); - rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum, ctx); + rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); exceptionConnectionCounter.incrementAndGet(); accum.resetForNextRequest(); log.atDebug().setMessage(()->"Removing accumulated traffic pair due to " + @@ -249,7 +239,6 @@ private static List getTrafficStreamsHeldByAccum(Accumulation } private Optional handleObservationForReadState(@NonNull Accumulation accum, - ConnectionContext ctx, TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, Instant timestamp) { @@ -268,7 +257,7 @@ private Optional handleObservationForReadState(@NonNull Accum log.atTrace().setMessage(() -> "Added request data for accum[" + connectionId + "]=" + accum).log(); } else if (observation.hasEndOfMessageIndicator()) { assert accum.hasRrPair(); - handleEndOfRequest(accum, ctx); + handleEndOfRequest(accum); } else if (observation.hasReadSegment()) { log.atTrace().setMessage(()->"Adding request segment for accum[" + connectionId + "]=" + accum).log(); var rrPair = accum.getOrCreateTransactionPair(trafficStreamKey); @@ -286,7 +275,7 @@ private 
Optional handleObservationForReadState(@NonNull Accum return Optional.of(CONNECTION_STATUS.ALIVE); } - private Optional handleObservationForWriteState(Accumulation accum, ConnectionContext ctx, + private Optional handleObservationForWriteState(Accumulation accum, TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, Instant timestamp) { @@ -313,8 +302,8 @@ private Optional handleObservationForWriteState(Accumulation assert rrPair.responseData.hasInProgressSegment(); rrPair.responseData.finalizeRequestSegments(timestamp); } else if (observation.hasRead() || observation.hasReadSegment()) { - rotateAccumulationOnReadIfNecessary(connectionId, accum, ctx); - return handleObservationForReadState(accum, ctx, observation, trafficStreamKey, timestamp); + rotateAccumulationOnReadIfNecessary(connectionId, accum); + return handleObservationForReadState(accum, observation, trafficStreamKey, timestamp); } return Optional.of(CONNECTION_STATUS.ALIVE); @@ -323,20 +312,19 @@ private Optional handleObservationForWriteState(Accumulation // This function manages the transition case when an observation comes in that would terminate // any previous HTTP transaction for the connection. It returns true if there WAS a previous // transaction that has been reset and false otherwise - private boolean rotateAccumulationIfNecessary(String connectionId, Accumulation accum, ConnectionContext ctx) { + private boolean rotateAccumulationIfNecessary(String connectionId, Accumulation accum) { // If this was brand new, we don't need to care about triggering the callback. // We only need to worry about this if we have yet to send the RESPONSE. if (accum.state == Accumulation.State.ACCUMULATING_WRITES) { log.atDebug().setMessage(()->"Resetting accum[" + connectionId + "]=" + accum).log(); - handleEndOfResponse(accum, ctx, RequestResponsePacketPair.ReconstructionStatus.COMPLETE); + handleEndOfResponse(accum, RequestResponsePacketPair.ReconstructionStatus.COMPLETE); return true; } return false; } - private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumulation accum, - ConnectionContext ctx) { - if (rotateAccumulationIfNecessary(connectionId, accum, ctx)) { + private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumulation accum) { + if (rotateAccumulationIfNecessary(connectionId, accum)) { reusedKeepAliveCounter.incrementAndGet(); return true; } else { @@ -347,30 +335,28 @@ private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumul /** * @return True if something was sent to the callback, false if nothing had been accumulated */ - private boolean handleEndOfRequest(Accumulation accumulation, ConnectionContext ctx) { + private boolean handleEndOfRequest(Accumulation accumulation) { assert accumulation.state == Accumulation.State.ACCUMULATING_READS : "state == " + accumulation.state; - var requestPacketBytes = accumulation.getRrPair().requestData; + var rrPair = accumulation.getRrPair(); + var requestPacketBytes = rrPair.requestData; metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) .setAttribute(MetricsAttributeKey.REQUEST_ID, accumulation.getRequestKey().toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); assert (requestPacketBytes != null); assert (!requestPacketBytes.hasInProgressSegment()); - var requestContext = new RequestContext(ctx, accumulation.getRequestKey()); - listener.onRequestReceived(accumulation.getRequestKey(), 
requestContext, requestPacketBytes); + listener.onRequestReceived(accumulation.getRequestKey(), rrPair.requestContext, requestPacketBytes); accumulation.state = Accumulation.State.ACCUMULATING_WRITES; return true; } - private void handleEndOfResponse(Accumulation accumulation, ConnectionContext ctx, - RequestResponsePacketPair.ReconstructionStatus status) { + private void handleEndOfResponse(Accumulation accumulation, RequestResponsePacketPair.ReconstructionStatus status) { assert accumulation.state == Accumulation.State.ACCUMULATING_WRITES; metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) .setAttribute(MetricsAttributeKey.REQUEST_ID, accumulation.getRequestKey().toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); var rrPair = accumulation.getRrPair(); rrPair.completionStatus = status; - var requestContext = new RequestContext(ctx, accumulation.getRequestKey()); - listener.onFullDataReceived(accumulation.getRequestKey(), requestContext, rrPair); + listener.onFullDataReceived(accumulation.getRequestKey(), rrPair.requestContext, rrPair); accumulation.resetForNextRequest(); } @@ -385,7 +371,6 @@ public void close() { private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, RequestResponsePacketPair.ReconstructionStatus status) { - ConnectionContext ctx = new ConnectionContext(accumulation.trafficChannelKey); try { switch (accumulation.state) { case ACCUMULATING_READS: @@ -400,12 +385,12 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, log.warn("Terminating a TrafficStream reconstruction w/out an accumulated value, " + "assuming an empty server interaction and NOT reproducing this to the target cluster."); if (accumulation.hasRrPair()) { - listener.onTrafficStreamsExpired(status, ctx, + listener.onTrafficStreamsExpired(status, accumulation.channelContext, Collections.unmodifiableList(accumulation.getRrPair().trafficStreamKeysBeingHeld)); } return; case ACCUMULATING_WRITES: - handleEndOfResponse(accumulation, ctx, status); + handleEndOfResponse(accumulation, status); break; case WAITING_FOR_NEXT_READ_CHUNK: case IGNORING_LAST_REQUEST: @@ -416,7 +401,8 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, } finally { if (accumulation.hasSignaledRequests()) { listener.onConnectionClose(accumulation.trafficChannelKey, accumulation.getIndexOfCurrentRequest(), - ctx, status, accumulation.getLastTimestamp(), getTrafficStreamsHeldByAccum(accumulation)); + accumulation.channelContext, status, accumulation.getLastTimestamp(), + getTrafficStreamsHeldByAccum(accumulation)); } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index 35ae238b8..e04d45e87 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -18,13 +18,11 @@ import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; -import 
org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; import java.net.URI; -import java.time.Instant; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -69,7 +67,7 @@ public ConnectionReplaySession load(final String s) { } private DiagnosticTrackableCompletableFuture - getResilientClientChannelProducer(EventLoop eventLoop, ConnectionContext connectionContext) { + getResilientClientChannelProducer(EventLoop eventLoop, ChannelKeyContext connectionContext) { return new AdaptiveRateLimiter() .get(() -> { var clientConnectionChannelCreatedFuture = @@ -143,7 +141,7 @@ public void closeConnection(String connId) { } public Future - submitEventualSessionGet(ISourceTrafficChannelKey channelKey, boolean ignoreIfNotPresent, ConnectionContext ctx) { + submitEventualSessionGet(ISourceTrafficChannelKey channelKey, boolean ignoreIfNotPresent, ChannelKeyContext ctx) { ConnectionReplaySession channelFutureAndSchedule = getCachedSession(channelKey, ignoreIfNotPresent); if (channelFutureAndSchedule == null) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java index 3bb96fdee..e87ac3492 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java @@ -2,7 +2,8 @@ import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; public interface PacketConsumerFactory { - IPacketFinalizingConsumer create(UniqueReplayerRequestKey requestKey); + IPacketFinalizingConsumer create(UniqueReplayerRequestKey requestKey, RequestContext context); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index 5d03d7ddd..07b8c7647 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datahandlers.TransformedPacketReceiver; import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; @@ -14,6 +15,7 @@ @Slf4j public class PacketToTransformingHttpHandlerFactory implements PacketConsumerFactory> { + private final IJsonTransformer jsonTransformer; private final IAuthTransformerFactory authTransformerFactory; @@ -26,9 +28,9 @@ public PacketToTransformingHttpHandlerFactory(IJsonTransformer jsonTransformer, @Override public 
IPacketFinalizingConsumer> - create(UniqueReplayerRequestKey requestKey) { + create(UniqueReplayerRequestKey requestKey, RequestContext requestContext) { log.trace("creating HttpJsonTransformingConsumer"); return new HttpJsonTransformingConsumer<>(jsonTransformer, authTransformerFactory, - new TransformedPacketReceiver(), requestKey.toString(), new RequestContext(requestKey)); + new TransformedPacketReceiver(), requestKey.toString(), requestContext); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index eaecdcca6..f75b92331 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -9,7 +9,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; @@ -154,7 +154,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public void closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - ConnectionContext ctx, Instant timestamp) { + ChannelKeyContext ctx, Instant timestamp) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 9a2f085fe..45f686eb7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -4,6 +4,8 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.tracing.IRequestContext; import java.nio.charset.StandardCharsets; import java.time.Instant; @@ -25,10 +27,12 @@ public enum ReconstructionStatus { HttpMessageAndTimestamp responseData; List trafficStreamKeysBeingHeld; ReconstructionStatus completionStatus; + RequestContext requestContext; - public RequestResponsePacketPair(ITrafficStreamKey startingAtTrafficStreamKey) { + public RequestResponsePacketPair(ITrafficStreamKey startingAtTrafficStreamKey, RequestContext requestContext) { this.trafficStreamKeysBeingHeld = new ArrayList<>(); this.trafficStreamKeysBeingHeld.add(startingAtTrafficStreamKey); + this.requestContext = requestContext; } @NonNull ITrafficStreamKey getBeginningTrafficStreamKey() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index 94387dc3f..107c21bae 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -9,7 +9,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -64,14 +64,14 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { ()->"waiting for final aggregated response"); log.atDebug().setMessage(()->"Scheduling request for "+requestKey+" at start time "+start).log(); return asynchronouslyInvokeRunnableToSetupFuture(requestKey.getTrafficStreamKey(), - requestKey.getReplayerRequestIndex(), ctx, false, finalTunneledResponse, + requestKey.getReplayerRequestIndex(), ctx.getChannelKeyContext(), false, finalTunneledResponse, channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(requestKey, ctx, channelFutureAndRequestSchedule, finalTunneledResponse, start, interval, packets)); } public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - ConnectionContext ctx, + ChannelKeyContext ctx, Instant timestamp) { var channelInteraction = new IndexedChannelInteraction(channelKey, channelInteractionNum); var finalTunneledResponse = @@ -93,7 +93,7 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne private DiagnosticTrackableCompletableFuture asynchronouslyInvokeRunnableToSetupFuture(ISourceTrafficChannelKey channelKey, int channelInteractionNumber, - ConnectionContext ctx, boolean ignoreIfChannelNotPresent, + ChannelKeyContext ctx, boolean ignoreIfChannelNotPresent, DiagnosticTrackableCompletableFuture finalTunneledResponse, Consumer successFn) { var channelFutureAndScheduleFuture = @@ -145,7 +145,7 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne } private void scheduleOnConnectionReplaySession(ISourceTrafficChannelKey channelKey, int channelInteractionIdx, - ConnectionContext ctx, + ChannelKeyContext ctx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture futureToBeCompletedByTask, Instant atTime, String activityNameForLogging, Runnable task) { @@ -201,8 +201,9 @@ private void scheduleSendOnConnectionReplaySession(UniqueReplayerRequestKey requ getPacketReceiver(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); - scheduleOnConnectionReplaySession(requestKey.trafficStreamKey, requestKey.getSourceRequestIndex(), ctx, - channelFutureAndRequestSchedule, responseFuture, start, "send", packetSender); + scheduleOnConnectionReplaySession(requestKey.trafficStreamKey, requestKey.getSourceRequestIndex(), + ctx.getChannelKeyContext(), channelFutureAndRequestSchedule, responseFuture, start, + 
"send", packetSender); } private void runAfterChannelSetup(ConnectionReplaySession channelFutureAndItsFutureRequests, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 47e3ea27f..086cb14e2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -16,7 +16,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; @@ -676,7 +676,7 @@ Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, Re @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ConnectionContext ctx, List trafficStreamKeysBeingHeld) { + ChannelKeyContext ctx, List trafficStreamKeysBeingHeld) { commitTrafficStreams(trafficStreamKeysBeingHeld, status); } @@ -698,7 +698,7 @@ private void commitTrafficStreams(List trafficStreamKeysBeing @Override public void onConnectionClose(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant timestamp, List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); @@ -706,7 +706,7 @@ public void onConnectionClose(ISourceTrafficChannelKey channelKey, int channelIn } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ConnectionContext ctx) { + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ChannelKeyContext ctx) { commitTrafficStreams(List.of(tsk), true); } @@ -875,7 +875,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture - transformAllData(inputRequestTransformerFactory.create(requestKey), packetsSupplier)); + transformAllData(inputRequestTransformerFactory.create(requestKey, ctx), packetsSupplier)); log.atDebug().setMessage(()->"finalizeRequest future for transformation of " + requestKey + " = " + transformationCompleteFuture).log(); // It might be safer to chain this work directly inside the scheduleWork call above so that the diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 321557815..bf85d026f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -27,7 +27,7 @@ import org.opensearch.migrations.replay.AggregatedRawResponse; import 
org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -64,7 +64,8 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer"Active - setting up backend connection to " + host + ":" + port).log(); @@ -110,7 +111,7 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup if (connectFuture.isSuccess()) { var pipeline = connectFuture.channel().pipeline(); pipeline.removeFirst(); - log.atTrace().setMessage(()->connectionContext.getChannelKey() + + log.atTrace().setMessage(()-> channelKeyContext.getChannelKey() + " Done setting up client channel & it was successful").log(); if (sslContext != null) { var sslEngine = sslContext.newEngine(connectFuture.channel().alloc()); @@ -131,7 +132,7 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup } else { // Close the connection if the connection attempt has failed. log.atWarn().setCause(connectFuture.cause()) - .setMessage(() -> connectionContext.getChannelKey() + " CONNECT future was not successful, " + + .setMessage(() -> channelKeyContext.getChannelKey() + " CONNECT future was not successful, " + "so setting the channel future's result to an exception").log(); rval.setFailure(connectFuture.cause()); } @@ -195,8 +196,8 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa System.identityHashCode(packetData) + ")").log(); return writePacketAndUpdateFuture(packetData); } else { - log.atWarn().setMessage(()->tracingContext.getRequestKey() + "outbound channel was not set up " + - "successfully, NOT writing bytes hash=" + System.identityHashCode(packetData)).log(); + log.atWarn().setMessage(()->tracingContext.getReplayerRequestKey() + "outbound channel was not set " + + "up successfully, NOT writing bytes hash=" + System.identityHashCode(packetData)).log(); channel.close(); return DiagnosticTrackableCompletableFuture.Factory.failedFuture(channelInitException, ()->""); } @@ -211,13 +212,13 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa final var completableFuture = new DiagnosticTrackableCompletableFuture(new CompletableFuture<>(), ()->"CompletableFuture that will wait for the netty future to fill in the completion value"); final int readableBytes = packetData.readableBytes(); - METERING_CLOSURE.meterIncrementEvent(tracingContext.context, "readBytes", packetData.readableBytes()); + METERING_CLOSURE.meterIncrementEvent(tracingContext, "readBytes", packetData.readableBytes()); channel.writeAndFlush(packetData) .addListener((ChannelFutureListener) future -> { Throwable cause = null; try { if (!future.isSuccess()) { - log.atWarn().setMessage(()->tracingContext.getRequestKey() + "closing outbound channel " + + log.atWarn().setMessage(()->tracingContext.getReplayerRequestKey() + "closing outbound channel " + "because WRITE future was not successful " + future.cause() + " hash=" + System.identityHashCode(packetData) + " will be sending the exception to " + completableFuture).log(); @@ -236,9 +237,9 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa " an exception :" + packetData + " 
hash=" + System.identityHashCode(packetData)).log(); metricsLogger.atError(MetricsEvent.WRITING_REQUEST_COMPONENT_FAILED, cause) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()) - .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getRequestKey().toString()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getReplayerRequestKey().toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, - tracingContext.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); + tracingContext.getReplayerRequestKey().getTrafficStreamKey().getConnectionId()).emit(); completableFuture.future.completeExceptionally(cause); channel.close(); } @@ -247,9 +248,9 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa ". Created future for writing data="+completableFuture).log(); metricsLogger.atSuccess(MetricsEvent.WROTE_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()) - .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getRequestKey()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getReplayerRequestKey()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, - tracingContext.getRequestKey().getTrafficStreamKey().getConnectionId()) + tracingContext.getConnectionId()) .setAttribute(MetricsAttributeKey.SIZE_IN_BYTES, readableBytes).emit(); return completableFuture; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 3019e0c0b..f82014543 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -7,6 +7,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.Utils; @@ -45,6 +46,9 @@ */ @Slf4j public class HttpJsonTransformingConsumer implements IPacketFinalizingConsumer> { + public static final String TELEMETRY_SCOPE_NAME = "HttpTransformer"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + public static final int HTTP_MESSAGE_NUM_SEGMENTS = 2; public static final int EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS = 4; private final RequestPipelineOrchestrator pipelineOrchestrator; @@ -69,6 +73,8 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, IPacketFinalizingConsumer transformedPacketReceiver, String diagnosticLabel, RequestContext requestContext) { + this.requestContext = new RequestContext(requestContext.getReplayerRequestKey(), + METERING_CLOSURE.makeSpan(requestContext, "httpRequestTransformation")); chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); @@ -125,7 +131,7 @@ public 
DiagnosticTrackableCompletableFuture { - if (t != null) { - t = unwindPossibleCompletionException(t); - if (t instanceof NoContentException) { - return redriveWithoutTransformation(offloadingHandler.packetReceiver, t); - } else { - metricsLogger.atError(MetricsEvent.TRANSFORMING_REQUEST_FAILED, t) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext.toString()) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getChannelKey().getConnectionId()) - .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); - throw new CompletionException(t); - } - } else { - metricsLogger.atSuccess(MetricsEvent.REQUEST_WAS_TRANSFORMED) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getChannelKey().getConnectionId()) - .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); - return StringTrackableCompletableFuture.completedFuture(v, ()->"transformedHttpMessageValue"); - } - }, ()->"HttpJsonTransformingConsumer.finalizeRequest() is waiting to handle"); + (v, t) -> { + requestContext.getCurrentSpan().end(); + METERING_CLOSURE.meterIncrementEvent(requestContext, + t != null ? "transformRequestFailed" : "transformRequestSuccess"); + METERING_CLOSURE.meterHistogramMicros(requestContext, "transformationDuration"); + if (t != null) { + t = unwindPossibleCompletionException(t); + if (t instanceof NoContentException) { + return redriveWithoutTransformation(offloadingHandler.packetReceiver, t); + } else { + metricsLogger.atError(MetricsEvent.TRANSFORMING_REQUEST_FAILED, t) + .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext.toString()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); + throw new CompletionException(t); + } + } else { + metricsLogger.atSuccess(MetricsEvent.REQUEST_WAS_TRANSFORMED) + .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); + return StringTrackableCompletableFuture.completedFuture(v, ()->"transformedHttpMessageValue"); + } + }, ()->"HttpJsonTransformingConsumer.finalizeRequest() is waiting to handle"); } private static Throwable unwindPossibleCompletionException(Throwable t) { @@ -181,7 +191,7 @@ private static Throwable unwindPossibleCompletionException(Throwable t) { ()->"HttpJsonTransformingConsumer.redriveWithoutTransformation.compose()"); metricsLogger.atError(MetricsEvent.REQUEST_REDRIVEN_WITHOUT_TRANSFORMATION, reason) .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getChannelKey().getConnectionId()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); return finalizedFuture.map(f->f.thenApply(r->reason == null ? 
new TransformedOutputAndResult(r, HttpRequestTransformationStatus.SKIPPED, null) : diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 771076d52..5db40bb9a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -56,7 +56,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception .toString()); metricsLogger.atSuccess(MetricsEvent.CAPTURED_REQUEST_PARSED_TO_HTTP) .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getChannelKey().getConnectionId()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) .setAttribute(MetricsAttributeKey.HTTP_METHOD, request.method()) .setAttribute(MetricsAttributeKey.HTTP_ENDPOINT, request.uri()).emit(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 5e285847d..650c5e003 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -10,7 +10,6 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java new file mode 100644 index 000000000..9b8f81896 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -0,0 +1,25 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.api.trace.Span; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.tracing.IConnectionContext; + +@AllArgsConstructor +public class ChannelKeyContext implements IConnectionContext { + @Getter + final ISourceTrafficChannelKey channelKey; + @Getter + final Span currentSpan; + + @Override + public String getConnectionId() { + return channelKey.getConnectionId(); + } + + @Override + public String getNodeId() { + return channelKey.getNodeId(); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java deleted file mode 100644 index 0312d5140..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ConnectionContext.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import io.netty.util.AttributeKey; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.ContextKey; -import lombok.NonNull; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; - -import java.util.stream.Stream; - -public class ConnectionContext implements WithAttributes { - protected static final ContextKey CHANNEL_KEY_CONTEXT_KEY = ContextKey.named("channelKey"); - protected static final AttributeKey CHANNEL_ATTR = AttributeKey.newInstance("channelKey"); - - public final Context context; - - public ConnectionContext(ISourceTrafficChannelKey tsk) { - this(Context.current().with(CHANNEL_KEY_CONTEXT_KEY, tsk)); - } - - public ConnectionContext(Context c) { - assert c.get(CHANNEL_KEY_CONTEXT_KEY) != null; - context = c; - } - - public @NonNull ISourceTrafficChannelKey getChannelKey() { - return context.get(CHANNEL_KEY_CONTEXT_KEY); - } - - @Override - public Stream getAttributeKeys() { - return Stream.of(CHANNEL_ATTR); - } -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java index bfc0f5407..34feabad4 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java @@ -1,26 +1,54 @@ package org.opensearch.migrations.replay.tracing; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.ContextKey; -import lombok.NonNull; +import io.opentelemetry.api.trace.Span; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.IReplayerRequestContext; +import org.opensearch.migrations.tracing.IWithStartTime; -public class RequestContext extends ConnectionContext { - private static final ContextKey UNIQUE_REQUEST_KEY = ContextKey.named("requestId"); +import java.time.Instant; - public RequestContext(UniqueReplayerRequestKey requestKey) { - this(Context.current(), requestKey); +public class RequestContext implements IReplayerRequestContext, IWithStartTime { + @Getter + final UniqueReplayerRequestKey replayerRequestKey; + @Getter + final Instant startTime; + @Getter + final Span currentSpan; + + public RequestContext(UniqueReplayerRequestKey replayerRequestKey, Span currentSpan) { + this.replayerRequestKey = replayerRequestKey; + this.currentSpan = currentSpan; + this.startTime = Instant.now(); + } + + public ChannelKeyContext getChannelKeyContext() { + return new ChannelKeyContext(replayerRequestKey.trafficStreamKey, currentSpan); + } + + @Override + public String getConnectionId() { + return replayerRequestKey.trafficStreamKey.getConnectionId(); + } + + @Override + public String getNodeId() { + return replayerRequestKey.trafficStreamKey.getNodeId(); } - public RequestContext(ConnectionContext ctx, UniqueReplayerRequestKey requestKey) { - 
this(ctx.context, requestKey); + @Override + public long sourceRequestIndex() { + return replayerRequestKey.getSourceRequestIndex(); } - public RequestContext(Context context, UniqueReplayerRequestKey requestKey) { - super(context.with(UNIQUE_REQUEST_KEY, requestKey).with(CHANNEL_KEY_CONTEXT_KEY, requestKey.trafficStreamKey)); + @Override + public long replayerRequestIndex() { + return replayerRequestKey.getReplayerRequestIndex(); } - public @NonNull UniqueReplayerRequestKey getRequestKey() { - return context.get(UNIQUE_REQUEST_KEY); + public ISourceTrafficChannelKey getChannelKey() { + return replayerRequestKey.trafficStreamKey; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java deleted file mode 100644 index 1988c7ae3..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/WithAttributes.java +++ /dev/null @@ -1,9 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import io.netty.util.AttributeKey; - -import java.util.stream.Stream; - -public interface WithAttributes { - Stream getAttributeKeys(); -} diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index e7c4b71e6..5fda5f7cf 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -44,7 +44,7 @@ public void testThatSchedulingWorks() throws Exception { var perPacketShift = Duration.ofMillis(10*i/NUM_REPEATS); var startTimeForThisRequest = baseTime.plus(perPacketShift); var requestPackets = makeRequest(i/NUM_REPEATS); - var arr = senderOrchestrator.scheduleRequest(requestContext.getRequestKey(), requestContext, + var arr = senderOrchestrator.scheduleRequest(requestContext.getReplayerRequestKey(), requestContext, startTimeForThisRequest, Duration.ofMillis(1), requestPackets.stream()); log.info("Scheduled item to run at " + startTimeForThisRequest); scheduledItems.add(arr); @@ -52,7 +52,7 @@ public void testThatSchedulingWorks() throws Exception { } var connectionCtx = TestRequestKey.getTestConnectionRequestContext(NUM_REQUESTS_TO_SCHEDULE); var closeFuture = senderOrchestrator.scheduleClose( - connectionCtx.getChannelKey(), NUM_REQUESTS_TO_SCHEDULE, connectionCtx, + connectionCtx.getChannelKey(), NUM_REQUESTS_TO_SCHEDULE, connectionCtx.getChannelKeyContext(), lastEndTime.plus(Duration.ofMillis(100))); Assertions.assertEquals(NUM_REQUESTS_TO_SCHEDULE, scheduledItems.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 90ee03710..400407dec 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -3,6 +3,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.netty.buffer.Unpooled; +import 
io.opentelemetry.api.GlobalOpenTelemetry; import lombok.extern.slf4j.Slf4j; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; @@ -19,7 +20,9 @@ import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.EmptyContext; import java.io.IOException; import java.io.InputStream; @@ -229,7 +232,8 @@ public void testOutputterForPost() throws IOException { @Test private void testOutputterForRequest(String requestResourceName, String expected) throws IOException { var trafficStreamKey = new PojoTrafficStreamKey(NODE_ID,"c",0); - var sourcePair = new RequestResponsePacketPair(trafficStreamKey); + var sourcePair = new RequestResponsePacketPair(trafficStreamKey, + TestRequestKey.getTestConnectionRequestContext(0)); var rawRequestData = loadResourceAsBytes("/requests/raw/" + requestResourceName); sourcePair.addRequestData(Instant.EPOCH, rawRequestData); var rawResponseData = NettyPacketToHttpConsumerTest.EXPECTED_RESPONSE_STRING.getBytes(StandardCharsets.UTF_8); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index c6959c581..ca9e268a6 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -2,6 +2,7 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; +import io.opentelemetry.api.GlobalOpenTelemetry; import io.vavr.Tuple2; import lombok.AllArgsConstructor; import lombok.NonNull; @@ -14,12 +15,13 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.time.Duration; @@ -97,7 +99,9 @@ static ByteBuf makeSequentialByteBuf(int offset, int size) { static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, List directives) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); - var offloader = connectionFactory.createOffloader("TEST_"+uniqueIdCounter.incrementAndGet()); + var offloader = connectionFactory.createOffloader(new ConnectionContext("test", "test", + 
GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()), + "TEST_"+uniqueIdCounter.incrementAndGet()); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); } @@ -219,19 +223,19 @@ public void onFullDataReceived(UniqueReplayerRequestKey requestKey, RequestConte @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ConnectionContext ctx, + ChannelKeyContext ctx, List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - ConnectionContext ctx, + ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant when, List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - ConnectionContext ctx) {} + ChannelKeyContext ctx) {} }); var tsList = trafficStreams.collect(Collectors.toList()); trafficStreams = tsList.stream(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index f52770fb9..a7fdfda40 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -9,7 +9,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; @@ -172,19 +172,19 @@ public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ConnectionContext ctx, + ChannelKeyContext ctx, List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - ConnectionContext ctx, + ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant when, List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - ConnectionContext ctx) {} + ChannelKeyContext ctx) {} }); var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); @@ -224,17 +224,17 @@ public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ConnectionContext ctx, + ChannelKeyContext ctx, List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - ConnectionContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, Instant when, List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - ConnectionContext ctx) {} + ChannelKeyContext ctx) {} } ); byte[] 
serializedChunks; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index 5927410f0..29b8cc72d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -4,6 +4,7 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import io.netty.util.concurrent.DefaultThreadFactory; +import io.opentelemetry.api.GlobalOpenTelemetry; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.AfterAll; @@ -166,7 +167,8 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) String connId = "TEST_" + j; var trafficStreamKey = new PojoTrafficStreamKey("testNodeId", connId, 0); var requestKey = new UniqueReplayerRequestKey(trafficStreamKey, 0, i); - var ctx = new RequestContext(requestKey); + var ctx = new RequestContext(requestKey, + GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, sendingFactory, ctx, Instant.now(), Instant.now(), requestKey, ()->Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 2d94b11a2..b0695c5de 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,8 +1,8 @@ package org.opensearch.migrations.replay; +import io.opentelemetry.api.GlobalOpenTelemetry; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ConnectionContext; import org.opensearch.migrations.replay.tracing.RequestContext; public class TestRequestKey { @@ -13,6 +13,7 @@ public static final RequestContext getTestConnectionRequestContext(int replayerI var rk = new UniqueReplayerRequestKey( new PojoTrafficStreamKey("testNodeId", "testConnectionId", 0), 0, replayerIdx); - return new RequestContext(new UniqueReplayerRequestKey(rk.trafficStreamKey, 1, 1)); + return new RequestContext(new UniqueReplayerRequestKey(rk.trafficStreamKey, 1, 1), + GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); } } From 3746a8ed62fd69f65709bd2dc20a88b11de111bd Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 29 Nov 2023 23:49:36 -0500 Subject: [PATCH 10/94] Get span parenting to work. 
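
For reference, a rough sketch of the parenting pattern this patch adopts (this is not code from the patch itself; the class and method names below are illustrative only, and it assumes the standard OpenTelemetry Java API). A child span gets an explicit parent by putting the parent Span into a Context and handing that to the SpanBuilder, falling back to setNoParent() for root spans, then stamping the caller's attributes on the started span:

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.api.trace.Tracer;
    import io.opentelemetry.context.Context;

    public class SpanParentingSketch {
        // Start a span as a child of an explicit parent (or as a root span when
        // parent is null), then attach the caller-supplied attributes to it.
        static Span startSpan(Tracer tracer, String name, Span parent, Attributes attrs) {
            var builder = tracer.spanBuilder(name);
            if (parent != null) {
                builder.setParent(Context.current().with(parent));
            } else {
                builder.setNoParent();
            }
            return builder.startSpan().setAllAttributes(attrs);
        }

        public static void main(String[] args) {
            Tracer tracer = GlobalOpenTelemetry.getTracer("sketch");
            // Connection-scoped parent span with a request-scoped child span.
            Span connection = startSpan(tracer, "connectionLifetime", null, Attributes.empty());
            Span request = startSpan(tracer, "httpRequestTransformation", connection, Attributes.empty());
            request.end();    // ends when the request-scoped work completes
            connection.end(); // ends when the connection goes away
        }
    }

Contexts such as ChannelKeyContext and RequestContext keep the returned Span so later stages (for example the httpRequestTransformation step) can end it and parent further child spans under it.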
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 11 ++---- .../tracing/KafkaRecordContext.java | 6 ++- .../KafkaCaptureFactoryTest.java | 2 +- .../tracing/ConnectionContext.java | 10 +++-- .../coreutils/SimpleMeteringClosure.java | 22 +++++++++-- .../migrations/coreutils/SpanGenerator.java | 8 ++++ .../coreutils/SpanWithParentGenerator.java | 9 +++++ .../migrations/tracing/IRequestContext.java | 5 +-- .../migrations/tracing/IWithAttributes.java | 8 +++- .../tracing/IWithStartTimeAndAttributes.java | 4 ++ ...allyReliableLoggingHttpRequestHandler.java | 2 +- .../netty/LoggingHttpRequestHandler.java | 1 - .../netty/LoggingHttpResponseHandler.java | 4 +- ...ReliableLoggingHttpRequestHandlerTest.java | 2 +- .../netty/ProxyChannelInitializer.java | 2 +- TrafficCapture/trafficReplayer/build.gradle | 1 + .../migrations/replay/Accumulation.java | 7 ++-- .../http/HttpJsonTransformingConsumer.java | 37 ++++++++++++------- .../replay/tracing/ChannelKeyContext.java | 9 ++++- .../replay/tracing/RequestContext.java | 26 +++++++++---- ...afficToHttpTransactionAccumulatorTest.java | 2 +- .../NettyPacketToHttpConsumerTest.java | 3 +- .../migrations/replay/TestRequestKey.java | 11 +++++- 23 files changed, 132 insertions(+), 60 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 29c27f425..b2df34d69 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -1,9 +1,6 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader; import com.google.protobuf.CodedOutputStream; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.ContextKey; import lombok.AllArgsConstructor; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; @@ -14,7 +11,6 @@ import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; import org.opensearch.migrations.coreutils.SimpleMeteringClosure; -import org.opensearch.migrations.tracing.EmptyContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; @@ -91,7 +87,7 @@ class StreamManager extends OrderedStreamLifecyleManager { public StreamManager(ConnectionContext incomingTelemetryContext, String connectionId) { this.telemetryContext = new ConnectionContext(incomingTelemetryContext, - METERING_CLOSURE.makeSpan(incomingTelemetryContext, "offloaderLifetime")); + METERING_CLOSURE.makeSpanContinuation("offloaderLifetime")); this.connectionId = connectionId; this.startTime = Instant.now(); } @@ -111,7 +107,7 @@ public void close() 
throws IOException { public CodedOutputStreamWrapper createStream() { METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_created"); var newStreamCtx = new ConnectionContext(telemetryContext, - METERING_CLOSURE.makeSpan(telemetryContext, "recordStream")); + METERING_CLOSURE.makeSpanContinuation("recordStream")); ByteBuffer bb = ByteBuffer.allocate(bufferSize); return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb, newStreamCtx); @@ -125,6 +121,7 @@ public CodedOutputStreamWrapper createStream() { outputStreamHolder); } var osh = (CodedOutputStreamWrapper) outputStreamHolder; + osh.streamContext.currentSpan.end(); // Structured context for MetricsLogger try { @@ -137,7 +134,7 @@ public CodedOutputStreamWrapper createStream() { log.debug("Sending Kafka producer record: {} for topic: {}", recordId, topicNameForTraffic); var flushContext = new KafkaRecordContext(telemetryContext, - METERING_CLOSURE.makeSpan(telemetryContext, "flushRecord"), + METERING_CLOSURE.makeSpanContinuation("flushRecord"), topicNameForTraffic, recordId, kafkaRecord.value().length); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 006112753..1e179a21e 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -5,6 +5,8 @@ import io.opentelemetry.api.trace.Span; import lombok.AllArgsConstructor; import lombok.Getter; +import org.opensearch.migrations.coreutils.SpanGenerator; +import org.opensearch.migrations.coreutils.SpanWithParentGenerator; import org.opensearch.migrations.tracing.IConnectionContext; import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.IWithStartTime; @@ -30,14 +32,14 @@ public class KafkaRecordContext implements IWithAttributes, @Getter public final int recordSize; - public KafkaRecordContext(IConnectionContext enclosingScope, Span currentSpan, + public KafkaRecordContext(IConnectionContext enclosingScope, SpanWithParentGenerator incomingSpan, String topic, String recordId, int recordSize) { this.enclosingScope = enclosingScope; - this.currentSpan = currentSpan; this.topic = topic; this.recordId = recordId; this.recordSize = recordSize; this.startTime = Instant.now(); + currentSpan = incomingSpan.apply(this.getPopulatedAttributes(), enclosingScope.getCurrentSpan()); } @Override diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 7099f6317..403bf9ef1 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -78,7 +78,7 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, 
private static ConnectionContext createCtx() { return new ConnectionContext("test", "test", - GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); + x->GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); } /** diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 6df11b03d..d661377ea 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -2,6 +2,8 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; +import org.opensearch.migrations.coreutils.SpanGenerator; +import org.opensearch.migrations.coreutils.SpanWithParentGenerator; import org.opensearch.migrations.tracing.IConnectionContext; import org.opensearch.migrations.tracing.IWithStartTime; @@ -17,17 +19,17 @@ public class ConnectionContext implements IConnectionContext, IWithStartTime { @Getter private final Instant startTime; - public ConnectionContext(ConnectionContext oldContext, Span currentSpan) { + public ConnectionContext(ConnectionContext oldContext, SpanWithParentGenerator spanGenerator) { this.connectionId = oldContext.getConnectionId(); this.nodeId = oldContext.getNodeId(); - this.currentSpan = currentSpan; this.startTime = Instant.now(); + this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), oldContext.getCurrentSpan()); } - public ConnectionContext(String connectionId, String nodeId, Span currentSpan) { + public ConnectionContext(String connectionId, String nodeId, SpanGenerator spanGenerator) { this.connectionId = connectionId; this.nodeId = nodeId; - this.currentSpan = currentSpan; + this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); this.startTime = Instant.now(); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java index f890264f2..ad6b484f0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java @@ -1,9 +1,12 @@ package org.opensearch.migrations.coreutils; import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; @@ -21,6 +24,7 @@ import java.time.Duration; import java.time.Instant; +import java.util.Optional; import java.util.concurrent.TimeUnit; public class SimpleMeteringClosure { @@ -131,9 +135,19 @@ public void meterHistogram(IWithAttributes ctx, String eventName, String units, .build()); } - public Span makeSpan(IWithAttributes ctx, String spanName) { - var span = tracer.spanBuilder(spanName).startSpan(); - 
span.setAllAttributes(ctx.getPopulatedAttributesBuilder().build()); - return span; + public SpanGenerator makeSpanContinuation(String spanName, Span parentSpan) { + var builder = tracer.spanBuilder(spanName); + return (attrs) -> getSpanWithParent(builder, attrs, parentSpan); + } + + public static Span getSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan) { + return Optional.ofNullable(parentSpan).map(p -> builder.setParent(Context.current().with(p))) + .orElseGet(builder::setNoParent) + .startSpan().setAllAttributes(attrs); + } + + public SpanWithParentGenerator makeSpanContinuation(String spanName) { + var builder = tracer.spanBuilder(spanName); + return (attrs,parentSpan) -> getSpanWithParent(builder, attrs, parentSpan); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java new file mode 100644 index 000000000..188f04099 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java @@ -0,0 +1,8 @@ +package org.opensearch.migrations.coreutils; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; + +import java.util.function.Function; + +public interface SpanGenerator extends Function { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java new file mode 100644 index 000000000..c404e46b9 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java @@ -0,0 +1,9 @@ +package org.opensearch.migrations.coreutils; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; + +import java.util.function.BiFunction; + +public interface SpanWithParentGenerator extends BiFunction { +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java index 67744ff5f..c199c7aa8 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java @@ -3,14 +3,13 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -public interface IRequestContext extends IConnectionContext { +public interface IRequestContext extends IWithAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); long sourceRequestIndex(); @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IConnectionContext.super.fillAttributes( - builder.put(SOURCE_REQUEST_INDEX_KEY, sourceRequestIndex())); + return builder.put(SOURCE_REQUEST_INDEX_KEY, sourceRequestIndex()); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java index 75d443d2d..6f4f244b0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java +++ 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java @@ -11,7 +11,13 @@ public interface IWithAttributes { Span getCurrentSpan(); - AttributesBuilder fillAttributes(AttributesBuilder builder); + default AttributesBuilder fillAttributes(AttributesBuilder builder) { + return builder; + } + + default Attributes getPopulatedAttributes() { + return getPopulatedAttributesBuilder().build(); + } default AttributesBuilder getPopulatedAttributesBuilder() { var currentObj = this; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java new file mode 100644 index 000000000..31be5b08c --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -0,0 +1,4 @@ +package org.opensearch.migrations.tracing; + +public interface IWithStartTimeAndAttributes extends IWithStartTime, IWithAttributes { +} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java index de3391fbe..b01a1dcb3 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java @@ -30,7 +30,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob if (shouldBlockPredicate.test(httpRequest)) { METERING_CLOSURE.meterIncrementEvent(connectionContext, "blockingRequestUntilFlush"); var flushContext = new ConnectionContext(connectionContext, - METERING_CLOSURE.tracer.spanBuilder("blockedForFlush").startSpan()); + METERING_CLOSURE.makeSpanContinuation("blockedForFlush")); trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index 7f3191dc0..c1814be03 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -81,7 +81,6 @@ public HttpRequest resetCurrentRequest() { public LoggingHttpRequestHandler(ConnectionContext incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { this.connectionContext = incomingContext; - var span = METERING_CLOSURE.makeSpan(incomingContext, "frontendConnection"); METERING_CLOSURE.meterIncrementEvent(incomingContext, "requestStarted"); this.trafficOffloader = trafficOffloader; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java index 94e93d021..fce1be7e2 
100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java @@ -43,8 +43,8 @@ public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelP public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); - var span = METERING_CLOSURE.makeSpan(telemetryContext,"backendConnection"); - telemetryContext = new ConnectionContext(telemetryContext, span); + telemetryContext = new ConnectionContext(telemetryContext, + METERING_CLOSURE.makeSpanContinuation("backendConnection")); connectTime = Instant.now(); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "connect"); METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", 1); diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index 86cd36ea5..d6fe17004 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -84,7 +84,7 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, ConsumerGlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); EmbeddedChannel channel = new EmbeddedChannel( new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, x->true)); // true: block every request channelWriter.accept(channel); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 2e48c9765..feddb9f91 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -47,7 +47,7 @@ protected void initChannel(SocketChannel ch) throws IOException { var connectionId = ch.id().asLongText(); var ctx = new ConnectionContext(connectionId, "", - METERING_CLOSURE.makeSpan(EmptyContext.singleton, "connectionLifetime")); + METERING_CLOSURE.makeSpanContinuation("connectionLifetime", null)); var offloader = connectionCaptureFactory.createOffloader(ctx, connectionId); ch.pipeline().addLast(new LoggingHttpResponseHandler<>(ctx, offloader)); ch.pipeline().addLast(new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, diff --git a/TrafficCapture/trafficReplayer/build.gradle b/TrafficCapture/trafficReplayer/build.gradle index 7cd49c99d..c50a37d27 100644 --- a/TrafficCapture/trafficReplayer/build.gradle +++ b/TrafficCapture/trafficReplayer/build.gradle @@ -66,6 +66,7 @@ dependencies { implementation 
'org.apache.commons:commons-compress:1.24.0' testFixturesImplementation project(':replayerPlugins:jsonMessageTransformers:jsonMessageTransformerInterface') + testFixturesImplementation project(':coreUtilities') testFixturesImplementation testFixtures(project(path: ':testUtilities')) testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index fa474ac4e..7b76a000b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -7,7 +7,6 @@ import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; -import org.opensearch.migrations.tracing.EmptyContext; import java.time.Instant; import java.util.concurrent.atomic.AtomicInteger; @@ -49,7 +48,7 @@ public Accumulation(@NonNull ITrafficStreamKey trafficChannelKey, this.state = dropObservationsLeftoverFromPrevious ? State.IGNORING_LAST_REQUEST : State.WAITING_FOR_NEXT_READ_CHUNK; channelContext = new ChannelKeyContext(trafficChannelKey, - METERING_CLOSURE.makeSpan(EmptyContext.singleton, "accumulatingChannel")); + METERING_CLOSURE.makeSpanContinuation("accumulatingChannel", null)); } public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey forTrafficStreamKey) { @@ -57,8 +56,8 @@ public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey fo return rrPair; } this.rrPair = new RequestResponsePacketPair(forTrafficStreamKey, - new RequestContext(getRequestKey(forTrafficStreamKey), - METERING_CLOSURE.makeSpan(channelContext, "accumulatingRequest"))); + new RequestContext(channelContext, getRequestKey(forTrafficStreamKey), + METERING_CLOSURE.makeSpanContinuation("accumulatingRequest"))); return rrPair; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index f82014543..ae86fd5a9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -3,6 +3,9 @@ import io.netty.buffer.ByteBuf; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.http.HttpRequestDecoder; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.Span; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; @@ -15,11 +18,14 @@ import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import 
org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; import org.slf4j.event.Level; import java.nio.charset.StandardCharsets; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Optional; @@ -54,7 +60,7 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume private final RequestPipelineOrchestrator pipelineOrchestrator; private final EmbeddedChannel channel; private static final MetricsLogger metricsLogger = new MetricsLogger("HttpJsonTransformingConsumer"); - private RequestContext requestContext; + private IWithStartTimeAndAttributes transformationContext; /** * Roughly try to keep track of how big each data chunk was that came into the transformer. These values @@ -73,13 +79,18 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, IPacketFinalizingConsumer transformedPacketReceiver, String diagnosticLabel, RequestContext requestContext) { - this.requestContext = new RequestContext(requestContext.getReplayerRequestKey(), - METERING_CLOSURE.makeSpan(requestContext, "httpRequestTransformation")); + this.transformationContext = new IWithStartTimeAndAttributes<>() { + @Getter + Span currentSpan = METERING_CLOSURE.makeSpanContinuation("httpRequestTransformation") + .apply(requestContext.getPopulatedAttributes(), requestContext.getCurrentSpan()); + @Getter + Instant startTime = Instant.now(); + @Override public RequestContext getEnclosingScope() { return requestContext; } + }; chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); channel = new EmbeddedChannel(); - this.requestContext = requestContext; pipelineOrchestrator = new RequestPipelineOrchestrator<>(chunkSizes, transformedPacketReceiver, authTransformerFactory, diagnosticLabel, requestContext); pipelineOrchestrator.addInitialHandlers(channel.pipeline(), transformer); @@ -142,25 +153,25 @@ public DiagnosticTrackableCompletableFuture { - requestContext.getCurrentSpan().end(); - METERING_CLOSURE.meterIncrementEvent(requestContext, + transformationContext.getCurrentSpan().end(); + METERING_CLOSURE.meterIncrementEvent(transformationContext, t != null ? 
"transformRequestFailed" : "transformRequestSuccess"); - METERING_CLOSURE.meterHistogramMicros(requestContext, "transformationDuration"); + METERING_CLOSURE.meterHistogramMicros(transformationContext, "transformationDuration"); if (t != null) { t = unwindPossibleCompletionException(t); if (t instanceof NoContentException) { return redriveWithoutTransformation(offloadingHandler.packetReceiver, t); } else { metricsLogger.atError(MetricsEvent.TRANSFORMING_REQUEST_FAILED, t) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext.toString()) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, transformationContext.toString()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, transformationContext.getEnclosingScope().getConnectionId()) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); throw new CompletionException(t); } } else { metricsLogger.atSuccess(MetricsEvent.REQUEST_WAS_TRANSFORMED) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, transformationContext) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, transformationContext.getEnclosingScope().getConnectionId()) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); return StringTrackableCompletableFuture.completedFuture(v, ()->"transformedHttpMessageValue"); } @@ -190,8 +201,8 @@ private static Throwable unwindPossibleCompletionException(Throwable t) { consumptionChainedFuture.thenCompose(v -> packetConsumer.finalizeRequest(), ()->"HttpJsonTransformingConsumer.redriveWithoutTransformation.compose()"); metricsLogger.atError(MetricsEvent.REQUEST_REDRIVEN_WITHOUT_TRANSFORMATION, reason) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, transformationContext) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, transformationContext.getEnclosingScope().getConnectionId()) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); return finalizedFuture.map(f->f.thenApply(r->reason == null ? 
new TransformedOutputAndResult(r, HttpRequestTransformationStatus.SKIPPED, null) : diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java index 9b8f81896..075b49f08 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -1,18 +1,23 @@ package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.trace.Span; -import lombok.AllArgsConstructor; import lombok.Getter; +import org.opensearch.migrations.coreutils.SpanGenerator; +import org.opensearch.migrations.coreutils.SpanWithParentGenerator; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.tracing.IConnectionContext; -@AllArgsConstructor public class ChannelKeyContext implements IConnectionContext { @Getter final ISourceTrafficChannelKey channelKey; @Getter final Span currentSpan; + public ChannelKeyContext(ISourceTrafficChannelKey channelKey, SpanGenerator spanGenerator) { + this.channelKey = channelKey; + this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); + } + @Override public String getConnectionId() { return channelKey.getConnectionId(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java index 34feabad4..947ba19fb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java @@ -1,16 +1,23 @@ package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.trace.Span; -import lombok.AllArgsConstructor; import lombok.Getter; +import org.opensearch.migrations.coreutils.SpanGenerator; +import org.opensearch.migrations.coreutils.SpanWithParentGenerator; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.EmptyContext; +import org.opensearch.migrations.tracing.IConnectionContext; import org.opensearch.migrations.tracing.IReplayerRequestContext; +import org.opensearch.migrations.tracing.IRequestContext; +import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Instant; public class RequestContext implements IReplayerRequestContext, IWithStartTime { + @Getter + IConnectionContext enclosingScope; @Getter final UniqueReplayerRequestKey replayerRequestKey; @Getter @@ -18,24 +25,27 @@ public class RequestContext implements IReplayerRequestContext, IWithStartTime { @Getter final Span currentSpan; - public RequestContext(UniqueReplayerRequestKey replayerRequestKey, Span currentSpan) { + IWithAttributes> foo; + + public RequestContext(ChannelKeyContext enclosingScope, UniqueReplayerRequestKey replayerRequestKey, + SpanWithParentGenerator spanGenerator) { + this.enclosingScope = enclosingScope; this.replayerRequestKey = replayerRequestKey; - this.currentSpan = currentSpan; this.startTime = Instant.now(); + this.currentSpan = 
spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan()); } public ChannelKeyContext getChannelKeyContext() { - return new ChannelKeyContext(replayerRequestKey.trafficStreamKey, currentSpan); + return new ChannelKeyContext(replayerRequestKey.trafficStreamKey, + innerAttributesToIgnore_LeavingOriginalAttributesInPlace->currentSpan); } - @Override public String getConnectionId() { - return replayerRequestKey.trafficStreamKey.getConnectionId(); + return enclosingScope.getConnectionId(); } - @Override public String getNodeId() { - return replayerRequestKey.trafficStreamKey.getNodeId(); + return enclosingScope.getNodeId(); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index ca9e268a6..eb4746335 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -100,7 +100,7 @@ static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, List directives) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); var offloader = connectionFactory.createOffloader(new ConnectionContext("test", "test", - GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()), + x->GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()), "TEST_"+uniqueIdCounter.incrementAndGet()); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index 29b8cc72d..e6f833c7b 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -167,8 +167,7 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) String connId = "TEST_" + j; var trafficStreamKey = new PojoTrafficStreamKey("testNodeId", connId, 0); var requestKey = new UniqueReplayerRequestKey(trafficStreamKey, 0, i); - var ctx = new RequestContext(requestKey, - GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); + var ctx = TestRequestKey.getTestConnectionRequestContext(0); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, sendingFactory, ctx, Instant.now(), Instant.now(), requestKey, ()->Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index b0695c5de..ff11ff423 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ 
b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,9 +1,13 @@ package org.opensearch.migrations.replay; import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.common.Attributes; +import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.tracing.EmptyContext; public class TestRequestKey { @@ -13,7 +17,10 @@ public static final RequestContext getTestConnectionRequestContext(int replayerI var rk = new UniqueReplayerRequestKey( new PojoTrafficStreamKey("testNodeId", "testConnectionId", 0), 0, replayerIdx); - return new RequestContext(new UniqueReplayerRequestKey(rk.trafficStreamKey, 1, 1), - GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); + var smc = new SimpleMeteringClosure("test"); + var channelKeyContext = new ChannelKeyContext(rk.trafficStreamKey, smc.makeSpanContinuation("test", null)); + return new RequestContext(channelKeyContext, + new UniqueReplayerRequestKey(rk.trafficStreamKey, 1, 1), + smc.makeSpanContinuation("test2")); } } From 4b432622f72cf4778529d20725fc76ce4fbdb743 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 30 Nov 2023 09:31:14 -0500 Subject: [PATCH 11/94] Attempt to fix a failing unit test. Make sure that the context is using the right requestKey, which also will have the appropriate indices as per the test context. Signed-off-by: Greg Schohn --- .../java/org/opensearch/migrations/replay/TestRequestKey.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index ff11ff423..a47c7b98f 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -19,8 +19,6 @@ public static final RequestContext getTestConnectionRequestContext(int replayerI 0, replayerIdx); var smc = new SimpleMeteringClosure("test"); var channelKeyContext = new ChannelKeyContext(rk.trafficStreamKey, smc.makeSpanContinuation("test", null)); - return new RequestContext(channelKeyContext, - new UniqueReplayerRequestKey(rk.trafficStreamKey, 1, 1), - smc.makeSpanContinuation("test2")); + return new RequestContext(channelKeyContext, rk, smc.makeSpanContinuation("test2")); } } From 322e12fc368d69c8ca21d1a842884b3786cab809 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 30 Nov 2023 15:26:15 -0500 Subject: [PATCH 12/94] Refactor. 
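
An illustrative aside (not part of the patch series): the test-fixture wiring that the hunks above converge on chains a per-request span under its per-connection span through the two makeSpanContinuation overloads. The sketch below mirrors TestRequestKey.getTestConnectionRequestContext as it stands after this fix; only the sketch's own class name is hypothetical.

import org.opensearch.migrations.coreutils.SimpleMeteringClosure;
import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey;
import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey;
import org.opensearch.migrations.replay.tracing.ChannelKeyContext;
import org.opensearch.migrations.replay.tracing.RequestContext;

class RequestContextWiringSketch {
    static RequestContext getTestConnectionRequestContext(int replayerIdx) {
        var rk = new UniqueReplayerRequestKey(
                new PojoTrafficStreamKey("testNodeId", "testConnectionId", 0), 0, replayerIdx);
        var smc = new SimpleMeteringClosure("test");
        // Two-argument overload: the parent span is fixed up front (null here, so "test" starts a root span).
        var channelKeyContext = new ChannelKeyContext(rk.trafficStreamKey,
                smc.makeSpanContinuation("test", null));
        // One-argument overload: the RequestContext constructor applies the generator with the
        // enclosing channel span as the parent, so "test2" becomes a child of "test".
        return new RequestContext(channelKeyContext, rk, smc.makeSpanContinuation("test2"));
    }
}
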
Couple name changes, class package changes, and moved IReplayerRequestContext to the replayer Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 2 +- .../kafkaoffloader/tracing/KafkaRecordContext.java | 7 +++---- .../trafficcapture/tracing/ConnectionContext.java | 10 +++++----- .../migrations/tracing/ContextWithSpan.java | 10 ---------- .../ISpanGenerator.java} | 4 ++-- .../ISpanWithParentGenerator.java} | 4 ++-- .../{coreutils => tracing}/SimpleMeteringClosure.java | 8 +++----- .../{ => commoncontexts}/IConnectionContext.java | 4 +++- .../tracing/{ => commoncontexts}/IRequestContext.java | 7 ++++--- .../netty/LoggingHttpRequestHandler.java | 2 +- .../netty/LoggingHttpResponseHandler.java | 2 +- .../trafficcapture/proxyserver/CaptureProxy.java | 2 +- .../proxyserver/netty/FrontsideHandler.java | 4 ++++ .../proxyserver/netty/ProxyChannelInitializer.java | 3 +-- .../opensearch/migrations/replay/Accumulation.java | 2 +- .../migrations/replay/ClientConnectionPool.java | 2 +- .../PacketToTransformingHttpHandlerFactory.java | 1 - .../migrations/replay/RequestResponsePacketPair.java | 2 -- .../opensearch/migrations/replay/TrafficReplayer.java | 2 +- .../datahandlers/NettyPacketToHttpConsumer.java | 2 +- .../http/HttpJsonTransformingConsumer.java | 4 +--- .../migrations/replay/tracing/ChannelKeyContext.java | 7 +++---- .../replay}/tracing/IReplayerRequestContext.java | 3 ++- .../migrations/replay/tracing/RequestContext.java | 11 ++++------- .../opensearch/migrations/replay/TestRequestKey.java | 5 +---- 25 files changed, 46 insertions(+), 64 deletions(-) delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/{coreutils/SpanGenerator.java => tracing/ISpanGenerator.java} (52%) rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/{coreutils/SpanWithParentGenerator.java => tracing/ISpanWithParentGenerator.java} (50%) rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/{coreutils => tracing}/SimpleMeteringClosure.java (95%) rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/{ => commoncontexts}/IConnectionContext.java (81%) rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/{ => commoncontexts}/IRequestContext.java (62%) rename TrafficCapture/{coreUtilities/src/main/java/org/opensearch/migrations => trafficReplayer/src/main/java/org/opensearch/migrations/replay}/tracing/IReplayerRequestContext.java (81%) diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index b2df34d69..25c1c967b 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -10,7 +10,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import 
org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 1e179a21e..140cfb601 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -5,9 +5,8 @@ import io.opentelemetry.api.trace.Span; import lombok.AllArgsConstructor; import lombok.Getter; -import org.opensearch.migrations.coreutils.SpanGenerator; -import org.opensearch.migrations.coreutils.SpanWithParentGenerator; -import org.opensearch.migrations.tracing.IConnectionContext; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.IWithStartTime; @@ -32,7 +31,7 @@ public class KafkaRecordContext implements IWithAttributes, @Getter public final int recordSize; - public KafkaRecordContext(IConnectionContext enclosingScope, SpanWithParentGenerator incomingSpan, + public KafkaRecordContext(IConnectionContext enclosingScope, ISpanWithParentGenerator incomingSpan, String topic, String recordId, int recordSize) { this.enclosingScope = enclosingScope; this.topic = topic; diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index d661377ea..fa43248cb 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -2,9 +2,9 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; -import org.opensearch.migrations.coreutils.SpanGenerator; -import org.opensearch.migrations.coreutils.SpanWithParentGenerator; -import org.opensearch.migrations.tracing.IConnectionContext; +import org.opensearch.migrations.tracing.ISpanGenerator; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Instant; @@ -19,14 +19,14 @@ public class ConnectionContext implements IConnectionContext, IWithStartTime { @Getter private final Instant startTime; - public ConnectionContext(ConnectionContext oldContext, SpanWithParentGenerator spanGenerator) { + public ConnectionContext(ConnectionContext oldContext, ISpanWithParentGenerator spanGenerator) { this.connectionId = oldContext.getConnectionId(); this.nodeId = oldContext.getNodeId(); this.startTime = Instant.now(); this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), oldContext.getCurrentSpan()); } - public ConnectionContext(String connectionId, String nodeId, SpanGenerator 
spanGenerator) { + public ConnectionContext(String connectionId, String nodeId, ISpanGenerator spanGenerator) { this.connectionId = connectionId; this.nodeId = nodeId; this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java deleted file mode 100644 index 80d419891..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ContextWithSpan.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.trace.Span; -import lombok.AllArgsConstructor; - -@AllArgsConstructor -public class ContextWithSpan> { - public final T context; - public final Span span; -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java similarity index 52% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java index 188f04099..84eb59192 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanGenerator.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java @@ -1,8 +1,8 @@ -package org.opensearch.migrations.coreutils; +package org.opensearch.migrations.tracing; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import java.util.function.Function; -public interface SpanGenerator extends Function { } +public interface ISpanGenerator extends Function { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java similarity index 50% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java index c404e46b9..bdd4dc066 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SpanWithParentGenerator.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java @@ -1,9 +1,9 @@ -package org.opensearch.migrations.coreutils; +package org.opensearch.migrations.tracing; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.Span; import java.util.function.BiFunction; -public interface SpanWithParentGenerator extends BiFunction { +public interface ISpanWithParentGenerator extends BiFunction { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java similarity index 95% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java index ad6b484f0..3714739c7 100644 --- 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java @@ -1,4 +1,4 @@ -package org.opensearch.migrations.coreutils; +package org.opensearch.migrations.tracing; import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.common.Attributes; @@ -19,8 +19,6 @@ import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; -import org.opensearch.migrations.tracing.IWithAttributes; -import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Duration; import java.time.Instant; @@ -135,7 +133,7 @@ public void meterHistogram(IWithAttributes ctx, String eventName, String units, .build()); } - public SpanGenerator makeSpanContinuation(String spanName, Span parentSpan) { + public ISpanGenerator makeSpanContinuation(String spanName, Span parentSpan) { var builder = tracer.spanBuilder(spanName); return (attrs) -> getSpanWithParent(builder, attrs, parentSpan); } @@ -146,7 +144,7 @@ public static Span getSpanWithParent(SpanBuilder builder, Attributes attrs, Span .startSpan().setAllAttributes(attrs); } - public SpanWithParentGenerator makeSpanContinuation(String spanName) { + public ISpanWithParentGenerator makeSpanContinuation(String spanName) { var builder = tracer.spanBuilder(spanName); return (attrs,parentSpan) -> getSpanWithParent(builder, attrs, parentSpan); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java similarity index 81% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 075ba18f1..83476b9fa 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -1,7 +1,9 @@ -package org.opensearch.migrations.tracing; +package org.opensearch.migrations.tracing.commoncontexts; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.EmptyContext; +import org.opensearch.migrations.tracing.IWithAttributes; public interface IConnectionContext extends IWithAttributes { static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IRequestContext.java similarity index 62% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IRequestContext.java index c199c7aa8..c6b932551 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRequestContext.java +++ 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IRequestContext.java @@ -1,15 +1,16 @@ -package org.opensearch.migrations.tracing; +package org.opensearch.migrations.tracing.commoncontexts; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.IWithAttributes; public interface IRequestContext extends IWithAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); - long sourceRequestIndex(); + long getSourceRequestIndex(); @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder.put(SOURCE_REQUEST_INDEX_KEY, sourceRequestIndex()); + return builder.put(SOURCE_REQUEST_INDEX_KEY, getSourceRequestIndex()); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index c1814be03..4dcb35ea0 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -18,7 +18,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java index fce1be7e2..c82c1b27e 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java @@ -8,7 +8,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index c4736c75f..7350e1f4f 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -17,7 +17,7 @@ 
import org.apache.kafka.common.config.SaslConfigs; import org.apache.logging.log4j.core.util.NullOutputStream; import org.opensearch.common.settings.Settings; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.FileConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/FrontsideHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/FrontsideHandler.java index 14416838b..d3347d8ce 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/FrontsideHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/FrontsideHandler.java @@ -8,6 +8,10 @@ import io.netty.util.ReferenceCountUtil; import lombok.extern.slf4j.Slf4j; +/** + * TODO - this should be renamed ForwardingHandler. It's the last handler of the front, + * but since it isn't the front of the frontside handlers, this name seems misleading. + */ @Slf4j public class FrontsideHandler extends ChannelInboundHandlerAdapter { diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index feddb9f91..f1e86ff2a 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -5,8 +5,7 @@ import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.ssl.SslHandler; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; -import org.opensearch.migrations.tracing.EmptyContext; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpRequestHandler; import org.opensearch.migrations.trafficcapture.netty.LoggingHttpResponseHandler; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index 7b76a000b..8d657613a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -1,7 +1,7 @@ package org.opensearch.migrations.replay; import lombok.NonNull; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index e04d45e87..a40cbb01a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -14,7 +14,7 @@ import io.opentelemetry.context.ContextKey; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index 07b8c7647..d827f28bf 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.replay; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datahandlers.TransformedPacketReceiver; import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 45f686eb7..f6cf9dbde 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -5,14 +5,12 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.tracing.RequestContext; -import org.opensearch.migrations.tracing.IRequestContext; import java.nio.charset.StandardCharsets; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Optional; @Slf4j public class RequestResponsePacketPair { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 086cb14e2..85b90d822 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -14,7 +14,7 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; 
-import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index bf85d026f..35615fe44 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -23,7 +23,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index ae86fd5a9..4df9735dc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -3,14 +3,13 @@ import io.netty.buffer.ByteBuf; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.http.HttpRequestDecoder; -import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.Utils; @@ -18,7 +17,6 @@ import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; -import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java index 075b49f08..3a8780fbe 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -2,10 +2,9 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; -import org.opensearch.migrations.coreutils.SpanGenerator; -import org.opensearch.migrations.coreutils.SpanWithParentGenerator; +import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.tracing.IConnectionContext; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; public class ChannelKeyContext implements IConnectionContext { @Getter @@ -13,7 +12,7 @@ public class ChannelKeyContext implements IConnectionContext { @Getter final Span currentSpan; - public ChannelKeyContext(ISourceTrafficChannelKey channelKey, SpanGenerator spanGenerator) { + public ChannelKeyContext(ISourceTrafficChannelKey channelKey, ISpanGenerator spanGenerator) { this.channelKey = channelKey; this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java similarity index 81% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java rename to TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java index 5bb61fce0..3de2e3dc1 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IReplayerRequestContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java @@ -1,7 +1,8 @@ -package org.opensearch.migrations.tracing; +package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.commoncontexts.IRequestContext; public interface IReplayerRequestContext extends IRequestContext { static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java index 947ba19fb..2fd747f32 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java @@ -2,14 +2,11 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; -import org.opensearch.migrations.coreutils.SpanGenerator; -import org.opensearch.migrations.coreutils.SpanWithParentGenerator; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.tracing.EmptyContext; -import 
org.opensearch.migrations.tracing.IConnectionContext; -import org.opensearch.migrations.tracing.IReplayerRequestContext; -import org.opensearch.migrations.tracing.IRequestContext; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.IWithStartTime; @@ -28,7 +25,7 @@ public class RequestContext implements IReplayerRequestContext, IWithStartTime { IWithAttributes> foo; public RequestContext(ChannelKeyContext enclosingScope, UniqueReplayerRequestKey replayerRequestKey, - SpanWithParentGenerator spanGenerator) { + ISpanWithParentGenerator spanGenerator) { this.enclosingScope = enclosingScope; this.replayerRequestKey = replayerRequestKey; this.startTime = Instant.now(); @@ -49,7 +46,7 @@ public String getNodeId() { } @Override - public long sourceRequestIndex() { + public long getSourceRequestIndex() { return replayerRequestKey.getSourceRequestIndex(); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index a47c7b98f..5089e6d7f 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,13 +1,10 @@ package org.opensearch.migrations.replay; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.common.Attributes; -import org.opensearch.migrations.coreutils.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; -import org.opensearch.migrations.tracing.EmptyContext; public class TestRequestKey { From 723bf778f1a505a0c2ef59d5c96a8d9f6126c9f7 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 1 Dec 2023 12:40:57 -0500 Subject: [PATCH 13/94] Bundle all of the offloader spans with the netty handler spans. 
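
An illustrative aside (not part of the patch series): judging from their call sites in the preceding patch, ISpanGenerator behaves as a Function from Attributes to Span, and ISpanWithParentGenerator as a BiFunction from (Attributes, Span) to Span. The sketch below makes the two SimpleMeteringClosure.makeSpanContinuation overloads explicit; the wrapper class and the span names are hypothetical, everything else is taken from the hunks above.

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.Span;
import org.opensearch.migrations.tracing.SimpleMeteringClosure;

class SpanContinuationSketch {
    // Two-argument overload: the parent is captured when the generator is built
    // (null yields a root span), so the caller only supplies attributes.
    static Span startRootSpan(SimpleMeteringClosure meteringClosure, Attributes attrs) {
        return meteringClosure.makeSpanContinuation("rootScope", null).apply(attrs);
    }

    // One-argument overload: the parent is supplied when the generator runs, which is how the
    // tracing contexts attach their span to the enclosing scope's span.
    static Span startChildSpan(SimpleMeteringClosure meteringClosure, Attributes attrs, Span parent) {
        return meteringClosure.makeSpanContinuation("childScope").apply(attrs, parent);
    }
}
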
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 18 ++- TrafficCapture/captureOffloader/build.gradle | 4 +- .../FileConnectionCaptureFactory.java | 5 +- .../IConnectionCaptureFactory.java | 3 +- .../tracing/ConnectionContext.java | 2 +- .../InMemoryConnectionCaptureFactory.java | 3 +- .../src/main/docker/docker-compose.yml | 1 + ...allyReliableLoggingHttpRequestHandler.java | 31 +++-- .../netty/LoggingHttpRequestHandler.java | 81 ++++++++---- .../netty/LoggingHttpResponseHandler.java | 118 +++++++----------- .../netty/RequestContextStateMachine.java | 24 ++++ .../netty/tracing/HttpMessageContext.java | 39 ++++++ ...ReliableLoggingHttpRequestHandlerTest.java | 10 +- .../proxyserver/CaptureProxy.java | 2 +- .../netty/ProxyChannelInitializer.java | 10 +- .../http/HttpJsonTransformingConsumer.java | 6 +- .../replay/tracing/RequestContext.java | 4 - ...afficToHttpTransactionAccumulatorTest.java | 5 +- .../src/test/resources/log4j2.properties | 5 + 19 files changed, 227 insertions(+), 144 deletions(-) create mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java create mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 25c1c967b..c1c84ad81 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; import org.opensearch.migrations.tracing.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; @@ -60,11 +61,8 @@ public KafkaCaptureFactory(String nodeId, Producer producer, int } @Override - public IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, + public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) { - METERING_CLOSURE.meterIncrementEvent(ctx, "offloader_created"); - METERING_CLOSURE.meterDeltaEvent(ctx, "offloaders_active", 1); - return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager(ctx, connectionId)); } @@ -81,13 +79,15 @@ static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { } class StreamManager extends OrderedStreamLifecyleManager { - ConnectionContext telemetryContext; + IConnectionContext telemetryContext; String connectionId; Instant startTime; - public StreamManager(ConnectionContext incomingTelemetryContext, String connectionId) { - this.telemetryContext = new ConnectionContext(incomingTelemetryContext, - METERING_CLOSURE.makeSpanContinuation("offloaderLifetime")); + public StreamManager(IConnectionContext ctx, String connectionId) { + this.telemetryContext 
= ctx; + METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_created"); + METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", 1); + this.connectionId = connectionId; this.startTime = Instant.now(); } @@ -99,8 +99,6 @@ public void close() throws IOException { Duration.between(startTime, Instant.now())); METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", -1); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_closed"); - - telemetryContext.currentSpan.end(); } @Override diff --git a/TrafficCapture/captureOffloader/build.gradle b/TrafficCapture/captureOffloader/build.gradle index 9c183b4a0..d4b9848dd 100644 --- a/TrafficCapture/captureOffloader/build.gradle +++ b/TrafficCapture/captureOffloader/build.gradle @@ -35,7 +35,7 @@ dependencies { testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' testImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' - testFixturesImplementation "com.google.protobuf:protobuf-java:3.22.2" testFixturesImplementation project(':captureProtobufs') - + testFixturesImplementation project(':coreUtilities') + testFixturesImplementation group: 'com.google.protobuf', name: 'protobuf-java', version: '3.22.2' } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java index b7ce9c029..a566b9e9c 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java @@ -3,6 +3,7 @@ import lombok.AllArgsConstructor; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.FileNotFoundException; @@ -84,7 +85,7 @@ public CodedOutputStreamAndByteBufferWrapper createStream() { } @Override - public IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, String connectionId) { - return new StreamChannelConnectionCaptureSerializer(nodeId, connectionId, new StreamManager(connectionId)); + public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) { + return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager(connectionId)); } } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java index c5c5270e5..1b8def0e1 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java @@ -1,9 +1,10 @@ package org.opensearch.migrations.trafficcapture; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; public interface IConnectionCaptureFactory { - IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, 
String connectionId) throws IOException; + IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index fa43248cb..c6d3bc5f0 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -19,7 +19,7 @@ public class ConnectionContext implements IConnectionContext, IWithStartTime { @Getter private final Instant startTime; - public ConnectionContext(ConnectionContext oldContext, ISpanWithParentGenerator spanGenerator) { + public ConnectionContext(IConnectionContext oldContext, ISpanWithParentGenerator spanGenerator) { this.connectionId = oldContext.getConnectionId(); this.nodeId = oldContext.getNodeId(); this.startTime = Instant.now(); diff --git a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java index 3c899e2eb..24c7718dd 100644 --- a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java @@ -3,6 +3,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import lombok.AllArgsConstructor; import lombok.Getter; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; @@ -60,7 +61,7 @@ protected CompletableFuture kickoffCloseStream(CodedOutputStreamHolder out } @Override - public IChannelConnectionCaptureSerializer createOffloader(ConnectionContext ctx, String connectionId) throws IOException { + public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) throws IOException { // This array is only an indirection to work around Java's constraint that lambda values are final return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager()); } diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index 1085d203f..18a2c9f89 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -153,6 +153,7 @@ services: - sharedReplayerOutput:/shared-replayer-output environment: - MIGRATION_KAFKA_BROKER_ENDPOINTS=kafka:9092 +# command: ./runTestBenchmarks.sh volumes: zookeeper_data: diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java index b01a1dcb3..29df56e56 100644 --- 
a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java @@ -3,24 +3,31 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpRequest; import io.netty.util.ReferenceCountUtil; -import io.opentelemetry.context.ContextKey; +import io.opentelemetry.api.trace.Span; +import lombok.Getter; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; +import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; +import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import java.io.IOException; import java.time.Instant; +import java.util.function.Function; import java.util.function.Predicate; @Slf4j public class ConditionallyReliableLoggingHttpRequestHandler extends LoggingHttpRequestHandler { - private ContextKey START_FLUSH_KEY = ContextKey.named("startTime"); private final Predicate shouldBlockPredicate; - public ConditionallyReliableLoggingHttpRequestHandler(ConnectionContext incomingContext, - IChannelConnectionCaptureSerializer trafficOffloader, - Predicate headerPredicateForWhenToBlock) { - super(incomingContext, trafficOffloader); + public ConditionallyReliableLoggingHttpRequestHandler(String nodeId, String connectionId, + IConnectionCaptureFactory trafficOffloaderFactory, + Predicate headerPredicateForWhenToBlock) + throws IOException { + super(nodeId, connectionId, trafficOffloaderFactory); this.shouldBlockPredicate = headerPredicateForWhenToBlock; } @@ -28,9 +35,13 @@ public ConditionallyReliableLoggingHttpRequestHandler(ConnectionContext incoming protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { if (shouldBlockPredicate.test(httpRequest)) { - METERING_CLOSURE.meterIncrementEvent(connectionContext, "blockingRequestUntilFlush"); - var flushContext = new ConnectionContext(connectionContext, - METERING_CLOSURE.makeSpanContinuation("blockedForFlush")); + METERING_CLOSURE.meterIncrementEvent(messageContext, "blockingRequestUntilFlush"); + var flushContext = new IWithStartTimeAndAttributes<>() { + @Getter Span currentSpan = METERING_CLOSURE.makeSpanContinuation("blockedForFlush") + .apply(messageContext.getPopulatedAttributes(), messageContext.getCurrentSpan()); + @Getter Instant startTime = Instant.now(); + @Override public HttpMessageContext getEnclosingScope() { return messageContext; } + }; trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); @@ -55,7 +66,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob } }); } else { - METERING_CLOSURE.meterIncrementEvent(connectionContext, "nonBlockingRequest"); + METERING_CLOSURE.meterIncrementEvent(messageContext, "nonBlockingRequest"); super.channelFinishedReadingAnHttpMessage(ctx, msg, httpRequest); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java 
b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index 4dcb35ea0..aa4c5ef77 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -1,8 +1,10 @@ package org.opensearch.migrations.trafficcapture.netty; import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.channel.ChannelPromise; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.http.DefaultHttpRequest; import io.netty.handler.codec.http.HttpContent; @@ -18,18 +20,24 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; +import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; +import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import java.io.IOException; import java.time.Instant; @Slf4j -public class LoggingHttpRequestHandler extends ChannelInboundHandlerAdapter { - public static final String TELEMETRY_SCOPE_NAME = "LoggingHttpInboundHandler"; +public class LoggingHttpRequestHandler extends ChannelDuplexHandler { + public static final String TELEMETRY_SCOPE_NAME = "CapturingHttpHandler"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); + public static final String GATHERING_REQUEST = "gatheringRequest"; + public static final String GATHERING_RESPONSE = "gatheringResponse"; static class SimpleHttpRequestDecoder extends HttpRequestDecoder { /** @@ -75,20 +83,32 @@ public HttpRequest resetCurrentRequest() { protected final IChannelConnectionCaptureSerializer trafficOffloader; protected final EmbeddedChannel httpDecoderChannel; - protected final SimpleHttpRequestDecoder requestDecoder; - protected final ConnectionContext connectionContext; - public LoggingHttpRequestHandler(ConnectionContext incomingContext, - IChannelConnectionCaptureSerializer trafficOffloader) { - this.connectionContext = incomingContext; - METERING_CLOSURE.meterIncrementEvent(incomingContext, "requestStarted"); + protected HttpMessageContext messageContext; - this.trafficOffloader = trafficOffloader; - requestDecoder = new SimpleHttpRequestDecoder(); // as a field for easier debugging + public LoggingHttpRequestHandler(String nodeId, String channelKey, + IConnectionCaptureFactory trafficOffloaderFactory) + throws IOException { + var parentContext = new ConnectionContext(channelKey, nodeId, + METERING_CLOSURE.makeSpanContinuation("connectionLifetime", null)); + + this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.Direction.REQUEST, + METERING_CLOSURE.makeSpanContinuation(GATHERING_REQUEST)); + METERING_CLOSURE.meterIncrementEvent(messageContext, 
"requestStarted"); + + this.trafficOffloader = trafficOffloaderFactory.createOffloader(parentContext, channelKey); httpDecoderChannel = new EmbeddedChannel( - requestDecoder, - new SimpleDecodedHttpRequestHandler() - ); + new SimpleHttpRequestDecoder(), + new SimpleDecodedHttpRequestHandler()); + } + + public void rotateNextMessageContext() { + messageContext.getCurrentSpan().end(); + final var wasResponse = messageContext.getDirection() == HttpMessageContext.Direction.RESPONSE; + messageContext = new HttpMessageContext(messageContext.getEnclosingScope(), + (wasResponse ? 1 : 0) + messageContext.getSourceRequestIndex(), + (wasResponse ? HttpMessageContext.Direction.REQUEST : HttpMessageContext.Direction.RESPONSE), + METERING_CLOSURE.makeSpanContinuation(wasResponse ? GATHERING_REQUEST : GATHERING_RESPONSE)); } private HttpProcessedState parseHttpMessageParts(ByteBuf msg) { @@ -96,7 +116,7 @@ private HttpProcessedState parseHttpMessageParts(ByteBuf msg) { var state = getHandlerThatHoldsParsedHttpRequest().isDone ? HttpProcessedState.FULL_MESSAGE : HttpProcessedState.ONGOING; - METERING_CLOSURE.meterIncrementEvent(connectionContext, + METERING_CLOSURE.meterIncrementEvent(messageContext, state == HttpProcessedState.FULL_MESSAGE ? "requestFullyParsed" : "requestPartiallyParsed"); return state; } @@ -108,7 +128,7 @@ private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - METERING_CLOSURE.meterIncrementEvent(connectionContext, "unregistered"); + METERING_CLOSURE.meterIncrementEvent(messageContext, "unregistered"); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -125,8 +145,9 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { - METERING_CLOSURE.meterIncrementEvent(connectionContext, "handlerRemoved"); - connectionContext.getCurrentSpan().end(); + METERING_CLOSURE.meterIncrementEvent(messageContext, "handlerRemoved"); + messageContext.getCurrentSpan().end(); + messageContext.getEnclosingScope().currentSpan.end(); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { @@ -143,7 +164,7 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { super.channelRead(ctx, msg); - METERING_CLOSURE.meterIncrementEvent(connectionContext, "requestReceived"); + METERING_CLOSURE.meterIncrementEvent(messageContext, "requestReceived"); metricsLogger.atSuccess(MetricsEvent.RECEIVED_FULL_HTTP_REQUEST) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()) @@ -153,13 +174,16 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (messageContext.getDirection() == HttpMessageContext.Direction.RESPONSE) { + rotateNextMessageContext(); + } var timestamp = Instant.now(); HttpProcessedState httpProcessedState; { var bb = ((ByteBuf) msg).retainedDuplicate(); trafficOffloader.addReadEvent(timestamp, bb); - METERING_CLOSURE.meterIncrementEvent(connectionContext, "read"); - METERING_CLOSURE.meterIncrementEvent(connectionContext, 
"readBytes", bb.readableBytes()); + METERING_CLOSURE.meterIncrementEvent(messageContext, "read"); + METERING_CLOSURE.meterIncrementEvent(messageContext, "readBytes", bb.readableBytes()); metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); @@ -181,10 +205,25 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception } } + @Override + public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { + if (messageContext.getDirection() == HttpMessageContext.Direction.REQUEST) { + rotateNextMessageContext(); + } + var bb = (ByteBuf) msg; + trafficOffloader.addWriteEvent(Instant.now(), bb); + metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) + .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); + METERING_CLOSURE.meterIncrementEvent(messageContext, "write"); + METERING_CLOSURE.meterIncrementEvent(messageContext, "writeBytes", bb.readableBytes()); + + super.write(ctx, msg, promise); + } + @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - METERING_CLOSURE.meterIncrementEvent(connectionContext, "exception"); + METERING_CLOSURE.meterIncrementEvent(messageContext, "exception"); httpDecoderChannel.close(); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java index c82c1b27e..50d263550 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java @@ -12,8 +12,6 @@ import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; -import java.net.SocketAddress; -import java.time.Duration; import java.time.Instant; @Slf4j @@ -24,83 +22,55 @@ public class LoggingHttpResponseHandler extends ChannelOutboundHandlerAdapter private final IChannelConnectionCaptureSerializer trafficOffloader; private ConnectionContext telemetryContext; - private Instant connectTime; public LoggingHttpResponseHandler(ConnectionContext incomingContext, IChannelConnectionCaptureSerializer trafficOffloader) { this.trafficOffloader = trafficOffloader; this.telemetryContext = incomingContext; } - - @Override - public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) throws Exception { - trafficOffloader.addBindEvent(Instant.now(), localAddress); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "bind"); - super.bind(ctx, localAddress, promise); - } - - @Override - public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { - trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); - - telemetryContext = new ConnectionContext(telemetryContext, - METERING_CLOSURE.makeSpanContinuation("backendConnection")); - connectTime = Instant.now(); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "connect"); - 
METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", 1); - - super.connect(ctx, remoteAddress, localAddress, promise); - } - - @Override - public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { - trafficOffloader.addDisconnectEvent(Instant.now()); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "disconnect"); - super.disconnect(ctx, promise); - } - - @Override - public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { - trafficOffloader.addCloseEvent(Instant.now()); - - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "close"); - METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", -1); - METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connectionDuration", - Duration.between(connectTime, Instant.now())); - telemetryContext.currentSpan.end(); - } - - @Override - public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { - trafficOffloader.addDeregisterEvent(Instant.now()); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "deregister"); - super.deregister(ctx, promise); - } - - @Override - public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { - var bb = (ByteBuf) msg; - trafficOffloader.addWriteEvent(Instant.now(), bb); - metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) - .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "write"); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "writeBytes", bb.readableBytes()); - - super.write(ctx, msg, promise); - } - - @Override - public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { - flush(ctx); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "removed"); - super.handlerRemoved(ctx); - } - - @Override - public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "exception"); - super.exceptionCaught(ctx, cause); - } +// +// @Override +// public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { +// trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); +// +// telemetryContext = new ConnectionContext(telemetryContext, +// METERING_CLOSURE.makeSpanContinuation("backendConnection")); +// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "connect"); +// METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", 1); +// +// super.connect(ctx, remoteAddress, localAddress, promise); +// } + +// @Override +// public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { +// trafficOffloader.addDisconnectEvent(Instant.now()); +// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "disconnect"); +// super.disconnect(ctx, promise); +// } + +// @Override +// public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { +// trafficOffloader.addCloseEvent(Instant.now()); +// +// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "close"); +// METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", -1); +// METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connectionDuration"); +// telemetryContext.currentSpan.end(); +// } + +// @Override +// public void 
deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { +// trafficOffloader.addDeregisterEvent(Instant.now()); +// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "deregister"); +// super.deregister(ctx, promise); +// } + +// +// @Override +// public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { +// trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); +// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "exception"); +// super.exceptionCaught(ctx, cause); +// } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java new file mode 100644 index 000000000..4a670e8d0 --- /dev/null +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java @@ -0,0 +1,24 @@ +package org.opensearch.migrations.trafficcapture.netty; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; + +/** + * This is a helper class so that we can emit metrics and traces for when we're + * accumulating a request vs waiting for the next response, then repeating indefinitely. + * + * TODO - this may be a performance bottleneck and we should carefully evaluate it's utility. + */ +@Slf4j +public class RequestContextStateMachine { + @Getter + public final ConnectionContext connectionContext; + @Getter + HttpMessageContext currentRequestContext; + + public RequestContextStateMachine(ConnectionContext incoming) { + connectionContext = incoming; + } +} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java new file mode 100644 index 000000000..db93967f7 --- /dev/null +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -0,0 +1,39 @@ +package org.opensearch.migrations.trafficcapture.netty.tracing; + +import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; +import org.opensearch.migrations.tracing.commoncontexts.IRequestContext; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; + +import java.time.Instant; + +public class HttpMessageContext implements IRequestContext, IWithStartTimeAndAttributes { + + public enum Direction { + REQUEST, + RESPONSE + } + + @Getter + final long sourceRequestIndex; + @Getter + final ConnectionContext enclosingScope; + @Getter + final Instant startTime; + @Getter + final Direction direction; + @Getter + final Span currentSpan; + + public HttpMessageContext(ConnectionContext enclosingScope, long sourceRequestIndex, Direction direction, + ISpanWithParentGenerator spanGenerator) { + this.sourceRequestIndex = sourceRequestIndex; + this.enclosingScope = enclosingScope; + this.startTime = Instant.now(); + this.direction = direction; + this.currentSpan = 
spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan()); + } +} diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index 8dfd72c79..0cc960ea7 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -80,13 +80,13 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer outputByteBuffer = new AtomicReference<>(); AtomicInteger flushCount = new AtomicInteger(); - var offloader = new StreamChannelConnectionCaptureSerializer("Test", "connection", - new StreamManager(outputByteBuffer, flushCount)); + ; - var ctx = new ConnectionContext("c", "n", - x->GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); EmbeddedChannel channel = new EmbeddedChannel( - new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, x->true)); // true: block every request + new ConditionallyReliableLoggingHttpRequestHandler("n", "c", + (ctx, connectionId) -> new StreamChannelConnectionCaptureSerializer("Test", connectionId, + new StreamManager(outputByteBuffer, flushCount)), + x->true)); // true: block every request channelWriter.accept(channel); // we wrote the correct data to the downstream handler/channel diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 7350e1f4f..d2466a3d9 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -174,7 +174,7 @@ private static Settings getSettings(@NonNull String configFile) { private static IConnectionCaptureFactory getNullConnectionCaptureFactory() { System.err.println("No trace log directory specified. 
Logging to /dev/null"); - return (ctx, connectionId) -> new StreamChannelConnectionCaptureSerializer<>(null, connectionId, + return (ctx,connectionId) -> new StreamChannelConnectionCaptureSerializer<>(null, connectionId, new StreamLifecycleManager<>() { @Override public void close() {} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index f1e86ff2a..5d492db34 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -5,6 +5,7 @@ import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.ssl.SslHandler; +import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpRequestHandler; @@ -45,12 +46,9 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - var ctx = new ConnectionContext(connectionId, "", - METERING_CLOSURE.makeSpanContinuation("connectionLifetime", null)); - var offloader = connectionCaptureFactory.createOffloader(ctx, connectionId); - ch.pipeline().addLast(new LoggingHttpResponseHandler<>(ctx, offloader)); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpRequestHandler(ctx, offloader, - this::shouldGuaranteeMessageOffloading)); + var capturingHandler = new ConditionallyReliableLoggingHttpRequestHandler<>(null, connectionId, + connectionCaptureFactory, this::shouldGuaranteeMessageOffloading); + ch.pipeline().addLast(capturingHandler); ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 4df9735dc..dae5e2d05 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -78,11 +78,9 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, String diagnosticLabel, RequestContext requestContext) { this.transformationContext = new IWithStartTimeAndAttributes<>() { - @Getter - Span currentSpan = METERING_CLOSURE.makeSpanContinuation("httpRequestTransformation") + @Getter Span currentSpan = METERING_CLOSURE.makeSpanContinuation("httpRequestTransformation") .apply(requestContext.getPopulatedAttributes(), requestContext.getCurrentSpan()); - @Getter - Instant startTime = Instant.now(); + @Getter Instant startTime = Instant.now(); @Override public RequestContext getEnclosingScope() { return requestContext; } }; chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java index 2fd747f32..a3d509105 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java @@ -41,10 +41,6 @@ public String getConnectionId() { return enclosingScope.getConnectionId(); } - public String getNodeId() { - return enclosingScope.getNodeId(); - } - @Override public long getSourceRequestIndex() { return replayerRequestKey.getSourceRequestIndex(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index f666bd9e7..16a1286e7 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -18,6 +18,7 @@ import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -99,8 +100,8 @@ static ByteBuf makeSequentialByteBuf(int offset, int size) { static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, List directives) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); - var offloader = connectionFactory.createOffloader(new ConnectionContext("test", "test", - x->GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()), + var offloader = connectionFactory.createOffloader(new ConnectionContext("n", "test", + new SimpleMeteringClosure("test").makeSpanContinuation("test", null)), "TEST_"+uniqueIdCounter.incrementAndGet()); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); diff --git a/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties b/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties index 9098da413..43e08b306 100644 --- a/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties +++ b/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties @@ -18,3 +18,8 @@ logger.OutputTupleJsonLogger.level = OFF logger.KPC.name = org.opensearch.migrations.replay.kafka.KafkaProtobufConsumer logger.KPC.level = DEBUG logger.KPC.appenderRef.stdout.ref = Console + +logger.RSO.name = org.opensearch.migrations.replay.RequestSenderOrchestrator +logger.RSO.level = TRACE +logger.RSO.additivity = false +logger.RSO.appenderRef.RSO.ref = Console From 15a1705c35143b5fa168e4f2ade79bc38db824b9 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 1 Dec 2023 20:02:24 -0500 Subject: [PATCH 14/94] Improve the tracing story for the capture proxy. 
Don't bother showing the Kafka offloader just buffering (was called recordStream). Now the offloader span is a child span of the connection span from the handler, so we can see the handler gathering the request/response (or waiting for the response). Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 6 +-- .../netty/LoggingHttpRequestHandler.java | 37 +++++++++++++------ .../netty/tracing/HttpMessageContext.java | 11 +++--- 3 files changed, 33 insertions(+), 21 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index c1c84ad81..bf502596c 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -71,7 +71,6 @@ public IChannelConnectionCaptureSerializer createOffloader(IConn static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { private final CodedOutputStream codedOutputStream; private final ByteBuffer byteBuffer; - final ConnectionContext streamContext; @Override public @NonNull CodedOutputStream getOutputStream() { return codedOutputStream; @@ -104,11 +103,9 @@ public void close() throws IOException { @Override public CodedOutputStreamWrapper createStream() { METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_created"); - var newStreamCtx = new ConnectionContext(telemetryContext, - METERING_CLOSURE.makeSpanContinuation("recordStream")); ByteBuffer bb = ByteBuffer.allocate(bufferSize); - return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb, newStreamCtx); + return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb); } @Override @@ -119,7 +116,6 @@ public CodedOutputStreamWrapper createStream() { outputStreamHolder); } var osh = (CodedOutputStreamWrapper) outputStreamHolder; - osh.streamContext.currentSpan.end(); // Structured context for MetricsLogger try { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index aa4c5ef77..84d34ab35 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -20,7 +20,6 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; -import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; @@ -37,6 +36,7 @@ public class LoggingHttpRequestHandler extends ChannelDuplexHandler { public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); public
static final String GATHERING_REQUEST = "gatheringRequest"; + public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; public static final String GATHERING_RESPONSE = "gatheringResponse"; static class SimpleHttpRequestDecoder extends HttpRequestDecoder { @@ -92,7 +92,7 @@ public LoggingHttpRequestHandler(String nodeId, String channelKey, var parentContext = new ConnectionContext(channelKey, nodeId, METERING_CLOSURE.makeSpanContinuation("connectionLifetime", null)); - this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.Direction.REQUEST, + this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.HttpTransactionState.REQUEST, METERING_CLOSURE.makeSpanContinuation(GATHERING_REQUEST)); METERING_CLOSURE.meterIncrementEvent(messageContext, "requestStarted"); @@ -102,13 +102,27 @@ public LoggingHttpRequestHandler(String nodeId, String channelKey, new SimpleDecodedHttpRequestHandler()); } - public void rotateNextMessageContext() { + static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state) { + switch (state) { + case REQUEST: + return GATHERING_REQUEST; + case WAITING: + return WAITING_FOR_RESPONSE; + case RESPONSE: + return GATHERING_RESPONSE; + default: + throw new IllegalStateException("Unknown enum value: "+state); + } + } + + public void rotateNextMessageContext(HttpMessageContext.HttpTransactionState nextState) { messageContext.getCurrentSpan().end(); - final var wasResponse = messageContext.getDirection() == HttpMessageContext.Direction.RESPONSE; + final var wasResponse = HttpMessageContext.HttpTransactionState.RESPONSE.equals(messageContext.getState()); messageContext = new HttpMessageContext(messageContext.getEnclosingScope(), - (wasResponse ? 1 : 0) + messageContext.getSourceRequestIndex(), - (wasResponse ? HttpMessageContext.Direction.REQUEST : HttpMessageContext.Direction.RESPONSE), - METERING_CLOSURE.makeSpanContinuation(wasResponse ? GATHERING_REQUEST : GATHERING_RESPONSE)); + (nextState== HttpMessageContext.HttpTransactionState.REQUEST ? 
1 : 0) + + messageContext.getSourceRequestIndex(), + nextState, + METERING_CLOSURE.makeSpanContinuation(getSpanLabelForState(nextState))); } private HttpProcessedState parseHttpMessageParts(ByteBuf msg) { @@ -163,6 +177,7 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { } protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, HttpRequest httpRequest) throws Exception { + rotateNextMessageContext(HttpMessageContext.HttpTransactionState.WAITING); super.channelRead(ctx, msg); METERING_CLOSURE.meterIncrementEvent(messageContext, "requestReceived"); @@ -174,8 +189,8 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - if (messageContext.getDirection() == HttpMessageContext.Direction.RESPONSE) { - rotateNextMessageContext(); + if (messageContext.getState() == HttpMessageContext.HttpTransactionState.RESPONSE) { + rotateNextMessageContext(HttpMessageContext.HttpTransactionState.REQUEST); } var timestamp = Instant.now(); HttpProcessedState httpProcessedState; @@ -207,8 +222,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { - if (messageContext.getDirection() == HttpMessageContext.Direction.REQUEST) { - rotateNextMessageContext(); + if (messageContext.getState() != HttpMessageContext.HttpTransactionState.RESPONSE) { + rotateNextMessageContext(HttpMessageContext.HttpTransactionState.RESPONSE); } var bb = (ByteBuf) msg; trafficOffloader.addWriteEvent(Instant.now(), bb); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index db93967f7..f0e4a48fc 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; import io.opentelemetry.api.trace.Span; +import lombok.EqualsAndHashCode; import lombok.Getter; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; @@ -11,9 +12,9 @@ import java.time.Instant; public class HttpMessageContext implements IRequestContext, IWithStartTimeAndAttributes { - - public enum Direction { + public enum HttpTransactionState { REQUEST, + WAITING, RESPONSE } @@ -24,16 +25,16 @@ public enum Direction { @Getter final Instant startTime; @Getter - final Direction direction; + final HttpTransactionState state; @Getter final Span currentSpan; - public HttpMessageContext(ConnectionContext enclosingScope, long sourceRequestIndex, Direction direction, + public HttpMessageContext(ConnectionContext enclosingScope, long sourceRequestIndex, HttpTransactionState state, ISpanWithParentGenerator spanGenerator) { this.sourceRequestIndex = sourceRequestIndex; this.enclosingScope = enclosingScope; this.startTime = Instant.now(); - this.direction = direction; + this.state = state; this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan()); } } From 
8a6f52af7752e9299b753d8be1a45241da42bd0d Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 2 Dec 2023 08:27:20 -0500 Subject: [PATCH 15/94] Tracing change: Flatten the flush span and just record it as 'blocked'. That makes it a separate state for the logging handler superclass. Signed-off-by: Greg Schohn --- ...ditionallyReliableLoggingHttpRequestHandler.java | 13 ++++--------- .../netty/LoggingHttpRequestHandler.java | 9 +++++++-- .../netty/tracing/HttpMessageContext.java | 1 + ...onallyReliableLoggingHttpRequestHandlerTest.java | 1 - 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java index 29df56e56..8f5d583fa 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java @@ -36,20 +36,15 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob throws Exception { if (shouldBlockPredicate.test(httpRequest)) { METERING_CLOSURE.meterIncrementEvent(messageContext, "blockingRequestUntilFlush"); - var flushContext = new IWithStartTimeAndAttributes<>() { - @Getter Span currentSpan = METERING_CLOSURE.makeSpanContinuation("blockedForFlush") - .apply(messageContext.getPopulatedAttributes(), messageContext.getCurrentSpan()); - @Getter Instant startTime = Instant.now(); - @Override public HttpMessageContext getEnclosingScope() { return messageContext; } - }; + rotateNextMessageContext(HttpMessageContext.HttpTransactionState.INTERNALLY_BLOCKED); trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); - METERING_CLOSURE.meterIncrementEvent(flushContext, + METERING_CLOSURE.meterIncrementEvent(messageContext, t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); - METERING_CLOSURE.meterHistogramMicros(flushContext, + METERING_CLOSURE.meterHistogramMicros(messageContext, t==null ? 
"blockedFlushFailure_micro" : "stream_flush_failure_micro"); - flushContext.currentSpan.end(); + messageContext.getCurrentSpan().end(); if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index 84d34ab35..d7ffa3fb9 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -38,6 +38,7 @@ public class LoggingHttpRequestHandler extends ChannelDuplexHandler { public static final String GATHERING_REQUEST = "gatheringRequest"; public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; public static final String GATHERING_RESPONSE = "gatheringResponse"; + public static final String BLOCKED = "blocked"; static class SimpleHttpRequestDecoder extends HttpRequestDecoder { /** @@ -106,6 +107,8 @@ static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state switch (state) { case REQUEST: return GATHERING_REQUEST; + case INTERNALLY_BLOCKED: + return BLOCKED; case WAITING: return WAITING_FOR_RESPONSE; case RESPONSE: @@ -115,8 +118,7 @@ static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state } } - public void rotateNextMessageContext(HttpMessageContext.HttpTransactionState nextState) { - messageContext.getCurrentSpan().end(); + protected void rotateNextMessageContext(HttpMessageContext.HttpTransactionState nextState) { final var wasResponse = HttpMessageContext.HttpTransactionState.RESPONSE.equals(messageContext.getState()); messageContext = new HttpMessageContext(messageContext.getEnclosingScope(), (nextState== HttpMessageContext.HttpTransactionState.REQUEST ? 
1 : 0) @@ -190,6 +192,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (messageContext.getState() == HttpMessageContext.HttpTransactionState.RESPONSE) { + messageContext.getCurrentSpan().end(); rotateNextMessageContext(HttpMessageContext.HttpTransactionState.REQUEST); } var timestamp = Instant.now(); @@ -206,6 +209,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception httpProcessedState = parseHttpMessageParts(bb); // bb is consumed/release by this method } if (httpProcessedState == HttpProcessedState.FULL_MESSAGE) { + messageContext.getCurrentSpan().end(); var httpRequest = getHandlerThatHoldsParsedHttpRequest().resetCurrentRequest(); var decoderResultLoose = httpRequest.decoderResult(); if (decoderResultLoose instanceof HttpMessageDecoderResult) { @@ -223,6 +227,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { if (messageContext.getState() != HttpMessageContext.HttpTransactionState.RESPONSE) { + messageContext.getCurrentSpan().end(); rotateNextMessageContext(HttpMessageContext.HttpTransactionState.RESPONSE); } var bb = (ByteBuf) msg; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index f0e4a48fc..e406c4225 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -14,6 +14,7 @@ public class HttpMessageContext implements IRequestContext, IWithStartTimeAndAttributes { public enum HttpTransactionState { REQUEST, + INTERNALLY_BLOCKED, WAITING, RESPONSE } diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index 0cc960ea7..185dcd0b5 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -80,7 +80,6 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer outputByteBuffer = new AtomicReference<>(); AtomicInteger flushCount = new AtomicInteger(); - ; EmbeddedChannel channel = new EmbeddedChannel( new ConditionallyReliableLoggingHttpRequestHandler("n", "c", From c50e01d228ef0c2a07d8cb5517b5395d074e6b25 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 4 Dec 2023 16:45:37 -0500 Subject: [PATCH 16/94] Minor cleanup - stop setting the namespace or trying to change in a processor. Prometheus metrics already have an export_name that is unique, the processors weren't doing anything useful, & the namespace was appending EVERYTHING from one of the two services. 
Signed-off-by: Greg Schohn --- .../docker/otel-collector-config-demo.yaml | 22 ------------------- .../netty/LoggingHttpRequestHandler.java | 1 - 2 files changed, 23 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml index 99ac784a8..ac9a2a6d5 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml @@ -6,7 +6,6 @@ receivers: exporters: prometheus: endpoint: "0.0.0.0:8889" - namespace: capturereplay const_labels: label1: value1 logging: @@ -21,26 +20,6 @@ exporters: tls: insecure: true - -processors: - # Processor to set the namespace based on the service name - attributes/nscapture: - actions: - - key: namespace - value: "capture" - action: insert - - key: service.name - value: "capture" - action: update - attributes/nsreplayer: - actions: - - key: namespace - value: "replay" - action: insert - - key: service.name - value: "replay" - action: update - extensions: health_check: pprof: @@ -57,5 +36,4 @@ service: exporters: [logging, zipkin, otlp/jaeger] metrics: receivers: [otlp] - processors: [attributes/nscapture, attributes/nsreplayer] exporters: [logging, prometheus] diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index d7ffa3fb9..8c8fc5135 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -119,7 +119,6 @@ static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state } protected void rotateNextMessageContext(HttpMessageContext.HttpTransactionState nextState) { - final var wasResponse = HttpMessageContext.HttpTransactionState.RESPONSE.equals(messageContext.getState()); messageContext = new HttpMessageContext(messageContext.getEnclosingScope(), (nextState== HttpMessageContext.HttpTransactionState.REQUEST ? 1 : 0) + messageContext.getSourceRequestIndex(), From 17c517dddf49ce915ba80c363398629a237351fb Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 4 Dec 2023 16:47:43 -0500 Subject: [PATCH 17/94] Start instrumenting the replayer with more contexts so that traces and (less so for now) metrics can be exported across more of the lifetime of a request/connection. 
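As a rough sketch of the pattern this change extends (illustrative only; it is written against the public OpenTelemetry API rather than this repo's SimpleMeteringClosure helper, and the class, scope, and attribute names below are hypothetical), a per-request span is parented to the connection-level span and ended once the request/response pair is finished:

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.context.Context;

// Hypothetical sketch: derive a per-request child span from a connection-level span
// so the request's lifetime shows up under its connection in exported traces.
class RequestSpanSketch {
    static Span startRequestSpan(Span connectionSpan, String connectionId, long sourceRequestIndex) {
        Tracer tracer = GlobalOpenTelemetry.getTracer("replay-sketch");
        return tracer.spanBuilder("httpRequestLifetime")
                .setParent(Context.current().with(connectionSpan)) // child of the connection span
                .setAttribute("connectionId", connectionId)
                .setAttribute("sourceRequestIndex", sourceRequestIndex)
                .startSpan();
    }

    static void endRequestSpan(Span requestSpan) {
        requestSpan.end(); // ending the span is what makes it eligible for export
    }
}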
Signed-off-by: Greg Schohn --- .../migrations/replay/Accumulation.java | 10 +++- ...edTrafficToHttpTransactionAccumulator.java | 4 ++ .../replay/ClientConnectionPool.java | 9 ++- .../migrations/replay/ReplayEngine.java | 10 ++-- .../replay/RequestSenderOrchestrator.java | 55 +++++++++---------- .../migrations/replay/TrafficReplayer.java | 2 +- .../NettyPacketToHttpConsumer.java | 4 +- .../datatypes/ConnectionReplaySession.java | 5 +- .../replay/tracing/RequestContext.java | 9 +-- .../replay/RequestSenderOrchestratorTest.java | 2 +- 10 files changed, 56 insertions(+), 54 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index 8d657613a..1fcb4ff47 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -48,7 +48,7 @@ public Accumulation(@NonNull ITrafficStreamKey trafficChannelKey, this.state = dropObservationsLeftoverFromPrevious ? State.IGNORING_LAST_REQUEST : State.WAITING_FOR_NEXT_READ_CHUNK; channelContext = new ChannelKeyContext(trafficChannelKey, - METERING_CLOSURE.makeSpanContinuation("accumulatingChannel", null)); + METERING_CLOSURE.makeSpanContinuation("processingChannel", null)); } public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey forTrafficStreamKey) { @@ -87,6 +87,14 @@ public boolean hasRrPair() { return rrPair; } + public void rotateRequestGatheringToResponse() { + var ctx = rrPair.requestContext; + ctx.getCurrentSpan().end(); + rrPair.requestContext = new RequestContext(ctx.getEnclosingScope(), + ctx.getReplayerRequestKey(), + METERING_CLOSURE.makeSpanContinuation("accumulatingResponse")); + } + public Instant getLastTimestamp() { return Instant.ofEpochMilli(newestPacketTimestampInMillis.get()); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 35cde47fd..28d1584ca 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -221,6 +221,7 @@ private static List getTrafficStreamsHeldByAccum(Accumulation accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); closedConnectionCounter.incrementAndGet(); + accum.channelContext.getCurrentSpan().end(); listener.onConnectionClose(accum.trafficChannelKey, accum.getIndexOfCurrentRequest(), accum.channelContext, RequestResponsePacketPair.ReconstructionStatus.COMPLETE, timestamp, getTrafficStreamsHeldByAccum(accum)); @@ -344,6 +345,7 @@ private boolean handleEndOfRequest(Accumulation accumulation) { .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); assert (requestPacketBytes != null); assert (!requestPacketBytes.hasInProgressSegment()); + accumulation.rotateRequestGatheringToResponse(); listener.onRequestReceived(accumulation.getRequestKey(), rrPair.requestContext, 
requestPacketBytes); accumulation.state = Accumulation.State.ACCUMULATING_WRITES; return true; @@ -356,6 +358,7 @@ private void handleEndOfResponse(Accumulation accumulation, RequestResponsePacke .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); var rrPair = accumulation.getRrPair(); rrPair.completionStatus = status; + rrPair.requestContext.getCurrentSpan().end(); listener.onFullDataReceived(accumulation.getRequestKey(), rrPair.requestContext, rrPair); accumulation.resetForNextRequest(); } @@ -400,6 +403,7 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, } } finally { if (accumulation.hasSignaledRequests()) { + accumulation.channelContext.getCurrentSpan().end(); listener.onConnectionClose(accumulation.trafficChannelKey, accumulation.getIndexOfCurrentRequest(), accumulation.channelContext, status, accumulation.getLastTimestamp(), getTrafficStreamsHeldByAccum(accumulation)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index a40cbb01a..f0a41143a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -17,7 +17,6 @@ import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -141,7 +140,7 @@ public void closeConnection(String connId) { } public Future - submitEventualSessionGet(ISourceTrafficChannelKey channelKey, boolean ignoreIfNotPresent, ChannelKeyContext ctx) { + submitEventualSessionGet(ChannelKeyContext channelKey, boolean ignoreIfNotPresent, ChannelKeyContext ctx) { ConnectionReplaySession channelFutureAndSchedule = getCachedSession(channelKey, ignoreIfNotPresent); if (channelFutureAndSchedule == null) { @@ -159,11 +158,11 @@ public void closeConnection(String connId) { } @SneakyThrows - public ConnectionReplaySession getCachedSession(ISourceTrafficChannelKey channelKey, boolean dontCreate) { + public ConnectionReplaySession getCachedSession(ChannelKeyContext channelKey, boolean dontCreate) { var crs = dontCreate ? connectionId2ChannelCache.getIfPresent(channelKey.getConnectionId()) : connectionId2ChannelCache.get(channelKey.getConnectionId()); if (crs != null) { - crs.setChannelId(channelKey); + crs.setChannelContext(channelKey); } return crs; } @@ -188,7 +187,7 @@ public ConnectionReplaySession getCachedSession(ISourceTrafficChannelKey channel if (channelAndFutureWork.hasWorkRemaining()) { log.atWarn().setMessage(()->"Work items are still remaining for this connection session" + "(last associated with connection=" + - channelAndFutureWork.getChannelId() + + channelAndFutureWork.getChannelContext() + "). 
" + channelAndFutureWork.calculateSizeSlowly() + " requests that were enqueued won't be run").log(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index f75b92331..c80fdabfa 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -124,15 +124,15 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - scheduleTransformationWork(UniqueReplayerRequestKey requestKey, Instant originalStart, + scheduleTransformationWork(RequestContext requestCtx, Instant originalStart, Supplier> task) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "processing"; var start = timeShifter.transformSourceTimeToRealTime(originalStart); - logStartOfWork(requestKey, newCount, start, label); - var result = networkSendOrchestrator.scheduleWork(requestKey.trafficStreamKey, + logStartOfWork(requestCtx, newCount, start, label); + var result = networkSendOrchestrator.scheduleWork(requestCtx.getEnclosingScope(), start.minus(EXPECTED_TRANSFORMATION_DURATION), task); - return hookWorkFinishingUpdates(result, originalStart, requestKey, label); + return hookWorkFinishingUpdates(result, originalStart, requestCtx, label); } public DiagnosticTrackableCompletableFuture @@ -159,7 +159,7 @@ public void closeConnection(ISourceTrafficChannelKey channelKey, int channelInte final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); logStartOfWork(new IndexedChannelInteraction(channelKey, channelInteractionNum), newCount, atTime, label); - var future = networkSendOrchestrator.scheduleClose(channelKey, channelInteractionNum, ctx, atTime); + var future = networkSendOrchestrator.scheduleClose(ctx, channelInteractionNum, atTime); hookWorkFinishingUpdates(future, timestamp, channelKey, label); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index 107c21bae..45bbaf521 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -36,13 +36,13 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { } public DiagnosticTrackableCompletableFuture - scheduleWork(ISourceTrafficChannelKey channelKey, Instant timestamp, + scheduleWork(ChannelKeyContext ctx, Instant timestamp, Supplier> task) { - var connectionSession = clientConnectionPool.getCachedSession(channelKey, false); + var connectionSession = clientConnectionPool.getCachedSession(ctx, false); var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); - log.atDebug().setMessage(()->"Scheduling work for "+channelKey+" at time "+timestamp).log(); + log.atDebug().setMessage(()->"Scheduling work for "+ctx.getConnectionId()+" at time "+timestamp).log(); connectionSession.eventLoop.schedule(()-> task.get().map(f->f.whenComplete((v,t) -> { if 
(t!=null) { @@ -63,25 +63,24 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final aggregated response"); log.atDebug().setMessage(()->"Scheduling request for "+requestKey+" at start time "+start).log(); - return asynchronouslyInvokeRunnableToSetupFuture(requestKey.getTrafficStreamKey(), - requestKey.getReplayerRequestIndex(), ctx.getChannelKeyContext(), false, finalTunneledResponse, - channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(requestKey, ctx, + return asynchronouslyInvokeRunnableToSetupFuture( + ctx.getEnclosingScope(), requestKey.getReplayerRequestIndex(), false, finalTunneledResponse, + channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(ctx, channelFutureAndRequestSchedule, finalTunneledResponse, start, interval, packets)); } - public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChannelKey channelKey, - int channelInteractionNum, - ChannelKeyContext ctx, + public StringTrackableCompletableFuture scheduleClose(ChannelKeyContext ctx, int channelInteractionNum, Instant timestamp) { + var channelKey = ctx.getChannelKey(); var channelInteraction = new IndexedChannelInteraction(channelKey, channelInteractionNum); var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm close has finished"); log.atDebug().setMessage(()->"Scheduling CLOSE for "+channelInteraction+" at time "+timestamp).log(); - asynchronouslyInvokeRunnableToSetupFuture(channelKey, channelInteractionNum, ctx,true, + asynchronouslyInvokeRunnableToSetupFuture(ctx, channelInteractionNum, true, finalTunneledResponse, channelFutureAndRequestSchedule-> - scheduleOnConnectionReplaySession(channelKey, channelInteractionNum, ctx, + scheduleOnConnectionReplaySession(ctx, channelInteractionNum, channelFutureAndRequestSchedule, finalTunneledResponse, timestamp, "close", () -> { log.trace("Closing client connection " + channelInteraction); clientConnectionPool.closeConnection(channelKey.getConnectionId()); @@ -92,21 +91,20 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne } private DiagnosticTrackableCompletableFuture - asynchronouslyInvokeRunnableToSetupFuture(ISourceTrafficChannelKey channelKey, int channelInteractionNumber, - ChannelKeyContext ctx, boolean ignoreIfChannelNotPresent, + asynchronouslyInvokeRunnableToSetupFuture(ChannelKeyContext ctx, int channelInteractionNumber, + boolean ignoreIfChannelNotPresent, DiagnosticTrackableCompletableFuture finalTunneledResponse, Consumer successFn) { var channelFutureAndScheduleFuture = - clientConnectionPool.submitEventualSessionGet(channelKey, ignoreIfChannelNotPresent, ctx); + clientConnectionPool.submitEventualSessionGet(ctx, ignoreIfChannelNotPresent, ctx); channelFutureAndScheduleFuture.addListener(submitFuture->{ if (!submitFuture.isSuccess()) { log.atError().setCause(submitFuture.cause()) - .setMessage(()->channelKey.toString() + " unexpected issue found from a scheduled task") - .log(); + .setMessage(()->ctx + " unexpected issue found from a scheduled task").log(); finalTunneledResponse.future.completeExceptionally(submitFuture.cause()); } else { - log.atTrace().setMessage(()->channelKey.toString() + - " on the channel's thread... getting a ConnectionReplaySession for it").log(); + log.atTrace().setMessage(()->ctx + " on the channel's thread... 
" + + "getting a ConnectionReplaySession for it").log(); var channelFutureAndRequestSchedule = ((ConnectionReplaySession) submitFuture.get()); if (channelFutureAndRequestSchedule == null) { finalTunneledResponse.future.complete(null); @@ -115,8 +113,8 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne channelFutureAndRequestSchedule.getChannelFutureFuture() .map(channelFutureGetAttemptFuture->channelFutureGetAttemptFuture .thenAccept(v->{ - log.atTrace().setMessage(()->channelKey.toString() + " in submitFuture(success) and scheduling the task" + - " for " + finalTunneledResponse.toString()).log(); + log.atTrace().setMessage(()->ctx + " in submitFuture(success) and " + + "scheduling the task for " + finalTunneledResponse.toString()).log(); assert v.channel() == channelFutureAndRequestSchedule.getChannelFutureFuture().future .getNow(null).channel(); @@ -127,13 +125,13 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne () -> successFn.accept(channelFutureAndRequestSchedule), x -> x.run()); if (cffr.scheduleSequencer.hasPending()) { - log.atDebug().setMessage(()->"Sequencer for "+channelKey+ + log.atDebug().setMessage(()->"Sequencer for "+ctx+ " = "+cffr.scheduleSequencer).log(); } }); }) .exceptionally(t->{ - log.atTrace().setCause(t).setMessage(()->channelKey.toString() + + log.atTrace().setCause(t).setMessage(()->ctx + " ChannelFuture creation threw an exception").log(); finalTunneledResponse.future.completeExceptionally(t); return null; @@ -144,12 +142,11 @@ public StringTrackableCompletableFuture scheduleClose(ISourceTrafficChanne return finalTunneledResponse; } - private void scheduleOnConnectionReplaySession(ISourceTrafficChannelKey channelKey, int channelInteractionIdx, - ChannelKeyContext ctx, + private void scheduleOnConnectionReplaySession(ChannelKeyContext ctx, int channelInteractionIdx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture futureToBeCompletedByTask, Instant atTime, String activityNameForLogging, Runnable task) { - var channelInteraction = new IndexedChannelInteraction(channelKey, channelInteractionIdx); + var channelInteraction = new IndexedChannelInteraction(ctx.getChannelKey(), channelInteractionIdx); log.atInfo().setMessage(()->channelInteraction + " scheduling " + activityNameForLogging + " at " + atTime).log(); var schedule = channelFutureAndRequestSchedule.schedule; @@ -191,7 +188,7 @@ private void scheduleOnConnectionReplaySession(ISourceTrafficChannelKey chan "... 
" + schedule).log(); } - private void scheduleSendOnConnectionReplaySession(UniqueReplayerRequestKey requestKey, RequestContext ctx, + private void scheduleSendOnConnectionReplaySession(RequestContext ctx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture responseFuture, Instant start, Duration interval, Stream packets) { @@ -201,9 +198,9 @@ private void scheduleSendOnConnectionReplaySession(UniqueReplayerRequestKey requ getPacketReceiver(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); - scheduleOnConnectionReplaySession(requestKey.trafficStreamKey, requestKey.getSourceRequestIndex(), - ctx.getChannelKeyContext(), channelFutureAndRequestSchedule, responseFuture, start, - "send", packetSender); + scheduleOnConnectionReplaySession(ctx.getEnclosingScope(), + ctx.getReplayerRequestKey().getSourceRequestIndex(), + channelFutureAndRequestSchedule, responseFuture, start, "send", packetSender); } private void runAfterChannelSetup(ConnectionReplaySession channelFutureAndItsFutureRequests, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 85b90d822..850be007b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -874,7 +874,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture> packetsSupplier) { try { - var transformationCompleteFuture = replayEngine.scheduleTransformationWork(requestKey, start, ()-> + var transformationCompleteFuture = replayEngine.scheduleTransformationWork(ctx, start, ()-> transformAllData(inputRequestTransformerFactory.create(requestKey, ctx), packetsSupplier)); log.atDebug().setMessage(()->"finalizeRequest future for transformation of " + requestKey + " = " + transformationCompleteFuture).log(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 35615fe44..361688ced 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -39,8 +39,6 @@ @Slf4j public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer { - private static final ContextKey START_OF_REQUEST_KEY = ContextKey.named("startOfRequest"); - private static final ContextKey START_OF_WRITE_KEY = ContextKey.named("startOfWrite"); public static final String TELEMETRY_SCOPE_NAME = "HttpSender"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); @@ -64,7 +62,7 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer> foo; - public RequestContext(ChannelKeyContext enclosingScope, UniqueReplayerRequestKey replayerRequestKey, ISpanWithParentGenerator spanGenerator) { this.enclosingScope = enclosingScope; @@ -32,11 +30,6 @@ public RequestContext(ChannelKeyContext enclosingScope, UniqueReplayerRequestKey 
this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan()); } - public ChannelKeyContext getChannelKeyContext() { - return new ChannelKeyContext(replayerRequestKey.trafficStreamKey, - innerAttributesToIgnore_LeavingOriginalAttributesInPlace->currentSpan); - } - public String getConnectionId() { return enclosingScope.getConnectionId(); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index 5fda5f7cf..a8701076d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -52,7 +52,7 @@ public void testThatSchedulingWorks() throws Exception { } var connectionCtx = TestRequestKey.getTestConnectionRequestContext(NUM_REQUESTS_TO_SCHEDULE); var closeFuture = senderOrchestrator.scheduleClose( - connectionCtx.getChannelKey(), NUM_REQUESTS_TO_SCHEDULE, connectionCtx.getChannelKeyContext(), + connectionCtx.getEnclosingScope(), NUM_REQUESTS_TO_SCHEDULE, lastEndTime.plus(Duration.ofMillis(100))); Assertions.assertEquals(NUM_REQUESTS_TO_SCHEDULE, scheduledItems.size()); From 628884407afe2e93b1f0c1c1e80b8bad9590b79e Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 11 Dec 2023 12:54:23 -0500 Subject: [PATCH 18/94] Double down on using Context objects in lieu of String labels and fix a test bug. Signed-off-by: Greg Schohn --- .../migrations/replay/AddCompressionEncodingTest.java | 2 +- .../replay/PacketToTransformingHttpHandlerFactory.java | 2 +- .../datahandlers/http/HttpJsonTransformingConsumer.java | 3 +-- ...NettyDecodedHttpRequestPreliminaryConvertHandler.java | 3 +-- .../http/NettySendByteBufsToPacketHandlerHandler.java | 9 +++++---- .../datahandlers/http/RequestPipelineOrchestrator.java | 9 +++------ .../migrations/replay/tracing/ChannelKeyContext.java | 7 +++++++ .../migrations/replay/tracing/RequestContext.java | 5 +++++ .../migrations/replay/HeaderTransformerTest.java | 6 +++--- .../datahandlers/NettyPacketToHttpConsumerTest.java | 7 ++----- .../http/HttpJsonTransformingConsumerTest.java | 9 ++++----- .../org/opensearch/migrations/replay/TestRequestKey.java | 9 ++++++++- .../java/org/opensearch/migrations/replay/TestUtils.java | 2 +- 13 files changed, 42 insertions(+), 31 deletions(-) diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java index 33817c884..3bb3c936b 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java @@ -33,7 +33,7 @@ public void addingCompressionRequestHeaderCompressesPayload() throws ExecutionEx var compressingTransformer = new HttpJsonTransformingConsumer( JsonJoltTransformer.newBuilder() .addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP) - 
.build(), null, testPacketCapture, "TEST", + .build(), null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); final var payloadPartSize = 511; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index d827f28bf..005c7896e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -30,6 +30,6 @@ public PacketToTransformingHttpHandlerFactory(IJsonTransformer jsonTransformer, create(UniqueReplayerRequestKey requestKey, RequestContext requestContext) { log.trace("creating HttpJsonTransformingConsumer"); return new HttpJsonTransformingConsumer<>(jsonTransformer, authTransformerFactory, - new TransformedPacketReceiver(), requestKey.toString(), requestContext); + new TransformedPacketReceiver(), requestContext); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index dae5e2d05..ee25911e3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -75,7 +75,6 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume public HttpJsonTransformingConsumer(IJsonTransformer transformer, IAuthTransformerFactory authTransformerFactory, IPacketFinalizingConsumer transformedPacketReceiver, - String diagnosticLabel, RequestContext requestContext) { this.transformationContext = new IWithStartTimeAndAttributes<>() { @Getter Span currentSpan = METERING_CLOSURE.makeSpanContinuation("httpRequestTransformation") @@ -88,7 +87,7 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); channel = new EmbeddedChannel(); pipelineOrchestrator = new RequestPipelineOrchestrator<>(chunkSizes, transformedPacketReceiver, - authTransformerFactory, diagnosticLabel, requestContext); + authTransformerFactory, requestContext); pipelineOrchestrator.addInitialHandlers(channel.pipeline(), transformer); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 5db40bb9a..2957a7a70 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -33,12 +33,11 @@ public class NettyDecodedHttpRequestPreliminaryConvertHandler extends Channel public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, 
List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, - String diagnosticLabel, RequestContext requestContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; - this.diagnosticLabel = "[" + diagnosticLabel + "] "; + this.diagnosticLabel = "[" + requestContext + "] "; this.requestContext = requestContext; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java index a0f405c8e..e46763082 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java @@ -8,6 +8,7 @@ import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; +import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -28,15 +29,15 @@ public class NettySendByteBufsToPacketHandlerHandler extends ChannelInboundHa DiagnosticTrackableCompletableFuture currentFuture; private AtomicReference>> packetReceiverCompletionFutureRef; - String diagnosticLabel; + RequestContext requestContext; public NettySendByteBufsToPacketHandlerHandler(IPacketFinalizingConsumer packetReceiver, - String diagnosticLabel) { + RequestContext requestContext) { this.packetReceiver = packetReceiver; this.packetReceiverCompletionFutureRef = new AtomicReference<>(); - this.diagnosticLabel = diagnosticLabel; + this.requestContext = requestContext; currentFuture = DiagnosticTrackableCompletableFuture.Factory.completedFuture(null, - ()->"currentFuture for NettySendByteBufsToPacketHandlerHandler initialized to the base case for " + diagnosticLabel); + ()->"currentFuture for NettySendByteBufsToPacketHandlerHandler initialized to the base case for " + requestContext); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 650c5e003..36c72e28b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -42,7 +42,6 @@ public class RequestPipelineOrchestrator { public static final String HTTP_REQUEST_DECODER_NAME = "HTTP_REQUEST_DECODER"; private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; - final String diagnosticLabel; private RequestContext requestContext; @Getter final IAuthTransformerFactory authTransfomerFactory; @@ -50,13 +49,11 @@ public class RequestPipelineOrchestrator { public RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, 
IAuthTransformerFactory incomingAuthTransformerFactory, - String diagnosticLabel, RequestContext requestContext) { this.chunkSizes = chunkSizes; this.packetReceiver = packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : IAuthTransformerFactory.NullAuthTransformerFactory.instance; - this.diagnosticLabel = diagnosticLabel; this.requestContext = requestContext; } @@ -99,8 +96,8 @@ void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) // Note3: ByteBufs will be sent through when there were pending bytes left to be parsed by the // HttpRequestDecoder when the HttpRequestDecoder is removed from the pipeline BEFORE the // NettyDecodedHttpRequestHandler is removed. - pipeline.addLast(new NettyDecodedHttpRequestPreliminaryConvertHandler(transformer, chunkSizes, this, - diagnosticLabel, requestContext)); + pipeline.addLast(new NettyDecodedHttpRequestPreliminaryConvertHandler(transformer, chunkSizes, + this, requestContext)); addLoggingHandler(pipeline, "B"); } @@ -150,7 +147,7 @@ void addBaselineHandlers(ChannelPipeline pipeline) { // OUT: nothing - terminal! ByteBufs are routed to the packet handler! addLoggingHandler(pipeline, "K"); pipeline.addLast(OFFLOADING_HANDLER_NAME, - new NettySendByteBufsToPacketHandlerHandler(packetReceiver, diagnosticLabel)); + new NettySendByteBufsToPacketHandlerHandler(packetReceiver, requestContext)); } private void addLoggingHandler(ChannelPipeline pipeline, String name) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java index 3a8780fbe..419793eca 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -6,6 +6,8 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; +import java.util.StringJoiner; + public class ChannelKeyContext implements IConnectionContext { @Getter final ISourceTrafficChannelKey channelKey; @@ -26,4 +28,9 @@ public String getConnectionId() { public String getNodeId() { return channelKey.getNodeId(); } + + @Override + public String toString() { + return channelKey.toString(); + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java index 29953654c..e90bfdf14 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java @@ -47,4 +47,9 @@ public long replayerRequestIndex() { public ISourceTrafficChannelKey getChannelKey() { return replayerRequestKey.trafficStreamKey; } + + @Override + public String toString() { + return replayerRequestKey.toString(); + } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java index ea1bb6eb9..69e8ad8b0 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java @@ -34,7 +34,7 @@ public void testTransformer() throws Exception { var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformer = new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME); var transformingHandler = new HttpJsonTransformingConsumer(transformer, null, testPacketCapture, - "TEST", TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + "HoSt: " + SOURCE_CLUSTER_NAME + "\r\n" + @@ -86,7 +86,7 @@ public void testMalformedPayloadIsPassedThrough() throws Exception { var httpBasicAuthTransformer = new StaticAuthTransformerFactory("Basic YWRtaW46YWRtaW4="); var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME), - httpBasicAuthTransformer, testPacketCapture, "TEST", + httpBasicAuthTransformer, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, @@ -113,7 +113,7 @@ public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exce var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME, null, "[{\"JsonTransformerForOpenSearch23PlusTargetTransformerProvider\":\"\"}]"), - null, testPacketCapture, "TEST", TestRequestKey.getTestConnectionRequestContext(0)); + null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); Random r = new Random(2); var stringParts = IntStream.range(0, 1).mapToObj(i-> TestUtils.makeRandomString(r, 10)).map(o->(String)o) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index e6f833c7b..05d27763c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -164,12 +164,9 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) new TestFlowController(), timeShifter); for (int j=0; j<2; ++j) { for (int i = 0; i < 2; ++i) { - String connId = "TEST_" + j; - var trafficStreamKey = new PojoTrafficStreamKey("testNodeId", connId, 0); - var requestKey = new UniqueReplayerRequestKey(trafficStreamKey, 0, i); - var ctx = TestRequestKey.getTestConnectionRequestContext(0); + var ctx = TestRequestKey.getTestConnectionRequestContext("TEST_"+i, j); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, - sendingFactory, ctx, Instant.now(), Instant.now(), requestKey, + sendingFactory, ctx, Instant.now(), Instant.now(), ctx.getReplayerRequestKey(), ()->Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); log.info("requestFinishFuture="+requestFinishFuture); var aggregatedResponse = 
requestFinishFuture.get(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java index f4c7031ff..50c2aaa44 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java @@ -27,7 +27,7 @@ public void testPassThroughSinglePacketPost() throws Exception { var transformingHandler = new HttpJsonTransformingConsumer(new TransformationLoader() .getTransformerFactoryLoader(null), - null, testPacketCapture, "TEST", + null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( @@ -48,7 +48,7 @@ public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws Ex var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), - null, testPacketCapture, "TEST", + null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( @@ -73,8 +73,7 @@ public void testRemoveAuthHeadersWorks() throws Exception { var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), - RemovingAuthTransformerFactory.instance, - testPacketCapture, "TEST", + RemovingAuthTransformerFactory.instance, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( @@ -114,7 +113,7 @@ private void walkMaps(Object o) { }); var transformingHandler = new HttpJsonTransformingConsumer(complexTransformer, null, - testPacketCapture, "TEST", TestRequestKey.getTestConnectionRequestContext(0)); + testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 5089e6d7f..a9b232ae4 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -8,11 +8,18 @@ public class TestRequestKey { + public static final String TEST_NODE_ID = "testNodeId"; + public static final String DEFAULT_TEST_CONNECTION = "testConnection"; + private TestRequestKey() {} public static final RequestContext getTestConnectionRequestContext(int replayerIdx) { + return getTestConnectionRequestContext(DEFAULT_TEST_CONNECTION, replayerIdx); + } + + public static final RequestContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { var rk = new UniqueReplayerRequestKey( - new PojoTrafficStreamKey("testNodeId", "testConnectionId", 0), + new PojoTrafficStreamKey(TEST_NODE_ID, 
connectionId, 0), 0, replayerIdx); var smc = new SimpleMeteringClosure("test"); var channelKeyContext = new ChannelKeyContext(rk.trafficStreamKey, smc.makeSpanContinuation("test", null)); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index 59d4576f2..3439bbf8f 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -141,7 +141,7 @@ static void runPipelineAndValidate(IJsonTransformer transformer, var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), new AggregatedRawResponse(-1, Duration.ZERO, new ArrayList<>(), null)); var transformingHandler = new HttpJsonTransformingConsumer<>(transformer, authTransformer, testPacketCapture, - "TEST", TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext("TEST_CONNECTION", 0)); var contentLength = stringParts.stream().mapToInt(String::length).sum(); var headerString = "GET / HTTP/1.1\r\n" + From c14da6a4f170a3a0ef70fb84853de9e776053dd4 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 12 Dec 2023 10:08:58 -0500 Subject: [PATCH 19/94] Update the Http Logging Handler to suppress response packet captures when the request was ignored and remove the now-unused file for responses. I'd like to revisit this eventually to make sure that it's as efficient as possible and to organize it better. However, it does get the job done for now and tests were updated to confirm. Signed-off-by: Greg Schohn --- .../netty/LoggingHttpRequestHandler.java | 92 +++++++++++++------ .../netty/LoggingHttpResponseHandler.java | 76 --------------- ...ReliableLoggingHttpRequestHandlerTest.java | 64 +++++++------ .../netty/ProxyChannelInitializer.java | 3 - 4 files changed, 102 insertions(+), 133 deletions(-) delete mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java index f941acf3c..5c1193720 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java @@ -41,11 +41,40 @@ public class LoggingHttpRequestHandler extends ChannelDuplexHandler { public static final String GATHERING_RESPONSE = "gatheringResponse"; public static final String BLOCKED = "blocked"; + static class CaptureIgnoreState { + static final byte CAPTURE = 0; + static final byte IGNORE_REQUEST = 1; + static final byte IGNORE_RESPONSE = 2; + private CaptureIgnoreState() {} + } + + static class CaptureState { + byte captureIgnoreState = CaptureIgnoreState.CAPTURE; + boolean liveReadObservationsInOffloader = false; + + boolean shouldCapture() { + return captureIgnoreState == CaptureIgnoreState.CAPTURE; + } + + public void setShouldCaptureForRequest(boolean b) { + captureIgnoreState = b ? 
CaptureIgnoreState.CAPTURE : CaptureIgnoreState.IGNORE_REQUEST; + } + + public void advanceStateModelIntoResponseGather() { + if (CaptureIgnoreState.CAPTURE != captureIgnoreState) { + captureIgnoreState = CaptureIgnoreState.IGNORE_RESPONSE; + } + } + } + static class SimpleHttpRequestDecoder extends HttpRequestDecoder { private final PassThruHttpHeaders.HttpHeadersToPreserve headersToPreserve; + private final CaptureState captureState; - public SimpleHttpRequestDecoder(@NonNull PassThruHttpHeaders.HttpHeadersToPreserve headersToPreserve) { + public SimpleHttpRequestDecoder(@NonNull PassThruHttpHeaders.HttpHeadersToPreserve headersToPreserve, + CaptureState captureState) { this.headersToPreserve = headersToPreserve; + this.captureState = captureState; } /** @@ -60,34 +89,40 @@ public HttpMessage createMessage(String[] initialLine) throws Exception { , new PassThruHttpHeaders(headersToPreserve) ); } - } + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (captureState.captureIgnoreState == CaptureIgnoreState.IGNORE_RESPONSE) { + captureState.captureIgnoreState = CaptureIgnoreState.CAPTURE; + } + super.channelRead(ctx, msg); + } + } + static class SimpleDecodedHttpRequestHandler extends ChannelInboundHandlerAdapter { @Getter private HttpRequest currentRequest; final RequestCapturePredicate requestCapturePredicate; - boolean isDone; - boolean shouldCapture; - boolean liveReadObservationsInOffloader; - - SimpleDecodedHttpRequestHandler(RequestCapturePredicate requestCapturePredicate) { + boolean haveParsedFullRequest; + final CaptureState captureState; + + SimpleDecodedHttpRequestHandler(RequestCapturePredicate requestCapturePredicate, CaptureState captureState) { this.requestCapturePredicate = requestCapturePredicate; this.currentRequest = null; - this.isDone = false; - this.shouldCapture = true; - liveReadObservationsInOffloader = false; + this.haveParsedFullRequest = false; + this.captureState = captureState; } @Override public void channelRead(@NonNull ChannelHandlerContext ctx, @NonNull Object msg) throws Exception { if (msg instanceof HttpRequest) { currentRequest = (HttpRequest) msg; - shouldCapture = RequestCapturePredicate.CaptureDirective.CAPTURE == - requestCapturePredicate.apply((HttpRequest) msg); + captureState.setShouldCaptureForRequest(RequestCapturePredicate.CaptureDirective.CAPTURE == + requestCapturePredicate.apply((HttpRequest) msg)); } else if (msg instanceof HttpContent) { ((HttpContent)msg).release(); if (msg instanceof LastHttpContent) { - isDone = true; + haveParsedFullRequest = true; } } else { super.channelRead(ctx, msg); @@ -95,11 +130,9 @@ public void channelRead(@NonNull ChannelHandlerContext ctx, @NonNull Object msg) } public HttpRequest resetCurrentRequest() { - this.shouldCapture = true; - this.isDone = false; + this.haveParsedFullRequest = false; var old = currentRequest; this.currentRequest = null; - this.liveReadObservationsInOffloader = false; return old; } } @@ -122,13 +155,13 @@ public LoggingHttpRequestHandler(String nodeId, String channelKey, METERING_CLOSURE.meterIncrementEvent(messageContext, "requestStarted"); this.trafficOffloader = trafficOffloaderFactory.createOffloader(parentContext, channelKey); + var captureState = new CaptureState(); httpDecoderChannel = new EmbeddedChannel( - new SimpleHttpRequestDecoder(httpHeadersCapturePredicate.getHeadersRequiredForMatcher()), - new SimpleDecodedHttpRequestHandler(httpHeadersCapturePredicate) + new 
SimpleHttpRequestDecoder(httpHeadersCapturePredicate.getHeadersRequiredForMatcher(), captureState), + new SimpleDecodedHttpRequestHandler(httpHeadersCapturePredicate, captureState) ); } - static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state) { switch (state) { case REQUEST: @@ -217,16 +250,16 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception httpDecoderChannel.writeInbound(bb.retainedDuplicate()); // the ByteBuf is consumed/release by this method METERING_CLOSURE.meterIncrementEvent(messageContext, - getHandlerThatHoldsParsedHttpRequest().isDone ? "requestFullyParsed" : "requestPartiallyParsed"); + getHandlerThatHoldsParsedHttpRequest().haveParsedFullRequest ? "requestFullyParsed" : "requestPartiallyParsed"); - var shouldCapture = requestParsingHandler.shouldCapture; + var captureState = requestParsingHandler.captureState; + var shouldCapture = captureState.shouldCapture(); if (shouldCapture) { - requestParsingHandler.liveReadObservationsInOffloader = true; + captureState.liveReadObservationsInOffloader = true; trafficOffloader.addReadEvent(timestamp, bb); - - } else if (requestParsingHandler.liveReadObservationsInOffloader) { + } else if (captureState.liveReadObservationsInOffloader) { trafficOffloader.cancelCaptureForCurrentRequest(timestamp); - requestParsingHandler.liveReadObservationsInOffloader = false; + captureState.liveReadObservationsInOffloader = false; } metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) @@ -235,9 +268,12 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception METERING_CLOSURE.meterIncrementEvent(messageContext, "readBytes", bb.readableBytes()); - if (requestParsingHandler.isDone) { + if (requestParsingHandler.haveParsedFullRequest) { messageContext.getCurrentSpan().end(); var httpRequest = requestParsingHandler.resetCurrentRequest(); + captureState.liveReadObservationsInOffloader = false; + captureState.advanceStateModelIntoResponseGather(); + if (shouldCapture) { var decoderResultLoose = httpRequest.decoderResult(); if (decoderResultLoose instanceof HttpMessageDecoderResult) { @@ -260,7 +296,9 @@ public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) rotateNextMessageContext(HttpMessageContext.HttpTransactionState.RESPONSE); } var bb = (ByteBuf) msg; - trafficOffloader.addWriteEvent(Instant.now(), bb); + if (getHandlerThatHoldsParsedHttpRequest().captureState.shouldCapture()) { + trafficOffloader.addWriteEvent(Instant.now(), bb); + } metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); METERING_CLOSURE.meterIncrementEvent(messageContext, "write"); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java deleted file mode 100644 index 50d263550..000000000 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpResponseHandler.java +++ /dev/null @@ -1,76 +0,0 @@ -package org.opensearch.migrations.trafficcapture.netty; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelOutboundHandlerAdapter; -import io.netty.channel.ChannelPromise; -import lombok.extern.slf4j.Slf4j; -import 
org.opensearch.migrations.coreutils.MetricsAttributeKey; -import org.opensearch.migrations.coreutils.MetricsEvent; -import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; -import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; - -import java.time.Instant; - -@Slf4j -public class LoggingHttpResponseHandler extends ChannelOutboundHandlerAdapter { - public static final String TELEMETRY_SCOPE_NAME = "LoggingHttpOutboundHandler"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpResponseHandler"); - - private final IChannelConnectionCaptureSerializer trafficOffloader; - private ConnectionContext telemetryContext; - - public LoggingHttpResponseHandler(ConnectionContext incomingContext, - IChannelConnectionCaptureSerializer trafficOffloader) { - this.trafficOffloader = trafficOffloader; - this.telemetryContext = incomingContext; - } -// -// @Override -// public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) throws Exception { -// trafficOffloader.addConnectEvent(Instant.now(), remoteAddress, localAddress); -// -// telemetryContext = new ConnectionContext(telemetryContext, -// METERING_CLOSURE.makeSpanContinuation("backendConnection")); -// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "connect"); -// METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", 1); -// -// super.connect(ctx, remoteAddress, localAddress, promise); -// } - -// @Override -// public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { -// trafficOffloader.addDisconnectEvent(Instant.now()); -// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "disconnect"); -// super.disconnect(ctx, promise); -// } - -// @Override -// public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { -// trafficOffloader.addCloseEvent(Instant.now()); -// -// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "close"); -// METERING_CLOSURE.meterDeltaEvent(telemetryContext, "connections", -1); -// METERING_CLOSURE.meterHistogramMillis(telemetryContext, "connectionDuration"); -// telemetryContext.currentSpan.end(); -// } - -// @Override -// public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { -// trafficOffloader.addDeregisterEvent(Instant.now()); -// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "deregister"); -// super.deregister(ctx, promise); -// } - -// -// @Override -// public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { -// trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); -// METERING_CLOSURE.meterIncrementEvent(telemetryContext, "exception"); -// super.exceptionCaught(ctx, cause); -// } - -} diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java index b5a8936d3..6fe532efe 100644 --- 
a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java @@ -4,11 +4,9 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; -import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; -import lombok.AllArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; @@ -23,17 +21,17 @@ import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.SequenceInputStream; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicBoolean; @@ -41,6 +39,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Collectors; +import java.util.stream.Stream; @Slf4j public class ConditionallyReliableLoggingHttpRequestHandlerTest { @@ -185,18 +184,22 @@ public void testThatHealthCheckCaptureCanBeSuppressed(boolean singleBytes) throw var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", ".*uploader.*")); EmbeddedChannel channel = new EmbeddedChannel( new ConditionallyReliableLoggingHttpRequestHandler("n", "c", - (ctx, connectionId) -> offloader, headerCapturePredicate, x->true)); + (ctx, connectionId) -> offloader, headerCapturePredicate, x->false)); getWriter(singleBytes, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); + channel.writeOutbound(Unpooled.wrappedBuffer("response1".getBytes(StandardCharsets.UTF_8))); getWriter(singleBytes, true, SimpleRequests.SMALL_POST.getBytes(StandardCharsets.UTF_8)).accept(channel); + var bytesForResponsePreserved = "response2".getBytes(StandardCharsets.UTF_8); + channel.writeOutbound(Unpooled.wrappedBuffer(bytesForResponsePreserved)); + channel.close(); var requestBytes = (SimpleRequests.HEALTH_CHECK + SimpleRequests.SMALL_POST).getBytes(StandardCharsets.UTF_8); // we wrote the correct data to the downstream handler/channel - var outputData = new SequenceInputStream(Collections.enumeration(channel.inboundMessages().stream() + var consumedData = new SequenceInputStream(Collections.enumeration(channel.inboundMessages().stream() .map(m->new ByteArrayInputStream(consumeIntoArray((ByteBuf)m))) .collect(Collectors.toList()))) .readAllBytes(); - log.info("outputdata = " + new String(outputData, StandardCharsets.UTF_8)); - Assertions.assertArrayEquals(requestBytes, outputData); + log.info("captureddata = " + new String(consumedData, 
StandardCharsets.UTF_8)); + Assertions.assertArrayEquals(requestBytes, consumedData); Assertions.assertNotNull(streamMgr.byteBufferAtomicReference, "This would be null if the handler didn't block until the output was written"); @@ -206,34 +209,41 @@ public void testThatHealthCheckCaptureCanBeSuppressed(boolean singleBytes) throw trafficStream.getSubStream(0).hasRead()); Assertions.assertEquals(1, streamMgr.flushCount.get()); var observations = trafficStream.getSubStreamList(); - if (singleBytes) { - var sawRequestDropped = new AtomicBoolean(false); - var observationsAfterDrop = observations.stream().dropWhile(o->{ - var wasDrop = o.hasRequestDropped(); - sawRequestDropped.compareAndSet(false, wasDrop); - return !sawRequestDropped.get() || wasDrop; - }).collect(Collectors.toList()); + { + var readObservationStreamToUse = singleBytes ? skipReadsBeforeDrop(observations) : observations.stream(); var combinedTrafficPacketsSteam = - new SequenceInputStream(Collections.enumeration(observationsAfterDrop.stream() - .filter(to->to.hasRead()) - .map(to->new ByteArrayInputStream(to.getRead().getData().toByteArray())) + new SequenceInputStream(Collections.enumeration(readObservationStreamToUse + .filter(to -> to.hasRead()) + .map(to -> new ByteArrayInputStream(to.getRead().getData().toByteArray())) .collect(Collectors.toList()))); + var reconstitutedTrafficStreamReads = combinedTrafficPacketsSteam.readAllBytes(); Assertions.assertArrayEquals(SimpleRequests.SMALL_POST.getBytes(StandardCharsets.UTF_8), - combinedTrafficPacketsSteam.readAllBytes()); - } else { + reconstitutedTrafficStreamReads); + } + + // check that we only got one response + { var combinedTrafficPacketsSteam = new SequenceInputStream(Collections.enumeration(observations.stream() - .filter(to->to.hasRead()) - .map(to->new ByteArrayInputStream(to.getRead().getData().toByteArray())) + .filter(to->to.hasWrite()) + .map(to->new ByteArrayInputStream(to.getWrite().getData().toByteArray())) .collect(Collectors.toList()))); - var reconstitutedTrafficStreamReads = combinedTrafficPacketsSteam.readAllBytes(); - log.info("reconstitutedTrafficStreamReads="+ - new String(reconstitutedTrafficStreamReads, StandardCharsets.UTF_8)); - Assertions.assertArrayEquals(SimpleRequests.SMALL_POST.getBytes(StandardCharsets.UTF_8), - reconstitutedTrafficStreamReads); + var reconstitutedTrafficStreamWrites = combinedTrafficPacketsSteam.readAllBytes(); + log.info("reconstitutedTrafficStreamWrites="+ + new String(reconstitutedTrafficStreamWrites, StandardCharsets.UTF_8)); + Assertions.assertArrayEquals(bytesForResponsePreserved, reconstitutedTrafficStreamWrites); } } + private static Stream skipReadsBeforeDrop(List observations) { + var sawRequestDropped = new AtomicBoolean(false); + return observations.stream().dropWhile(o->{ + var wasDrop = o.hasRequestDropped(); + sawRequestDropped.compareAndSet(false, wasDrop); + return !sawRequestDropped.get() || wasDrop; + }); + } + private Consumer getWriter(boolean singleBytes, boolean usePool, byte[] bytes) { if (singleBytes) { return getSingleByteAtATimeWriter(usePool, bytes); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index d9dc29a63..449abcc5e 100644 --- 
a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -5,14 +5,11 @@ import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.ssl.SslHandler; -import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import lombok.NonNull; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpRequestHandler; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; -import org.opensearch.migrations.trafficcapture.netty.LoggingHttpResponseHandler; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import javax.net.ssl.SSLEngine; import java.io.IOException; From 7de4009181f92f713c54627cc71718131347ca18 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 12 Dec 2023 10:10:01 -0500 Subject: [PATCH 20/94] File rename since the LoggingHttpRequest handler now handles both requests and responses. Signed-off-by: Greg Schohn --- ...nditionallyReliableLoggingHttpHandler.java} | 18 +++++------------- ...estHandler.java => LoggingHttpHandler.java} | 8 ++++---- ...ionallyReliableLoggingHttpHandlerTest.java} | 8 ++++---- .../netty/ProxyChannelInitializer.java | 4 ++-- 4 files changed, 15 insertions(+), 23 deletions(-) rename TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/{ConditionallyReliableLoggingHttpRequestHandler.java => ConditionallyReliableLoggingHttpHandler.java} (74%) rename TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/{LoggingHttpRequestHandler.java => LoggingHttpHandler.java} (97%) rename TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/{ConditionallyReliableLoggingHttpRequestHandlerTest.java => ConditionallyReliableLoggingHttpHandlerTest.java} (97%) diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java similarity index 74% rename from TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java rename to TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index a641b4bfe..05a31e8cd 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -3,31 +3,23 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.HttpRequest; import io.netty.util.ReferenceCountUtil; -import io.opentelemetry.api.trace.Span; -import lombok.Getter; import lombok.Lombok; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; -import 
org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; -import java.time.Instant; -import java.util.function.Function; import java.util.function.Predicate; @Slf4j -public class ConditionallyReliableLoggingHttpRequestHandler extends LoggingHttpRequestHandler { +public class ConditionallyReliableLoggingHttpHandler extends LoggingHttpHandler { private final Predicate shouldBlockPredicate; - public ConditionallyReliableLoggingHttpRequestHandler(@NonNull String nodeId, String connectionId, - @NonNull IConnectionCaptureFactory trafficOffloaderFactory, - @NonNull RequestCapturePredicate requestCapturePredicate, - @NonNull Predicate headerPredicateForWhenToBlock) + public ConditionallyReliableLoggingHttpHandler(@NonNull String nodeId, String connectionId, + @NonNull IConnectionCaptureFactory trafficOffloaderFactory, + @NonNull RequestCapturePredicate requestCapturePredicate, + @NonNull Predicate headerPredicateForWhenToBlock) throws IOException { super(nodeId, connectionId, trafficOffloaderFactory, requestCapturePredicate); this.shouldBlockPredicate = headerPredicateForWhenToBlock; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java similarity index 97% rename from TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java rename to TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 5c1193720..05061cdcc 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpRequestHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -32,7 +32,7 @@ import java.time.Instant; @Slf4j -public class LoggingHttpRequestHandler extends ChannelDuplexHandler { +public class LoggingHttpHandler extends ChannelDuplexHandler { public static final String TELEMETRY_SCOPE_NAME = "CapturingHttpHandler"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); @@ -143,9 +143,9 @@ public HttpRequest resetCurrentRequest() { protected HttpMessageContext messageContext; - public LoggingHttpRequestHandler(String nodeId, String channelKey, - @NonNull IConnectionCaptureFactory trafficOffloaderFactory, - @NonNull RequestCapturePredicate httpHeadersCapturePredicate) + public LoggingHttpHandler(String nodeId, String channelKey, + @NonNull IConnectionCaptureFactory trafficOffloaderFactory, + @NonNull RequestCapturePredicate httpHeadersCapturePredicate) throws IOException { var parentContext = new ConnectionContext(channelKey, nodeId, METERING_CLOSURE.makeSpanContinuation("connectionLifetime", null)); diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java 
b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java similarity index 97% rename from TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java rename to TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java index 6fe532efe..da9c1ea8d 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpRequestHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java @@ -42,7 +42,7 @@ import java.util.stream.Stream; @Slf4j -public class ConditionallyReliableLoggingHttpRequestHandlerTest { +public class ConditionallyReliableLoggingHttpHandlerTest { @RegisterExtension static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); private final Tracer tracer = otelTesting.getOpenTelemetry().getTracer("test"); @@ -86,7 +86,7 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer offloader, + new ConditionallyReliableLoggingHttpHandler("n", "c", (ctx, connectionId) -> offloader, new RequestCapturePredicate(), x->true)); // true: block every request channelWriter.accept(channel); @@ -159,7 +159,7 @@ public void testThatSuppressedCaptureWorks() throws Exception { var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", "uploader")); EmbeddedChannel channel = new EmbeddedChannel( - new ConditionallyReliableLoggingHttpRequestHandler("n", "c", + new ConditionallyReliableLoggingHttpHandler("n", "c", (ctx, connectionId) -> offloader, headerCapturePredicate, x->true)); getWriter(false, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); channel.close(); @@ -183,7 +183,7 @@ public void testThatHealthCheckCaptureCanBeSuppressed(boolean singleBytes) throw var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", ".*uploader.*")); EmbeddedChannel channel = new EmbeddedChannel( - new ConditionallyReliableLoggingHttpRequestHandler("n", "c", + new ConditionallyReliableLoggingHttpHandler("n", "c", (ctx, connectionId) -> offloader, headerCapturePredicate, x->false)); getWriter(singleBytes, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); channel.writeOutbound(Unpooled.wrappedBuffer("response1".getBytes(StandardCharsets.UTF_8))); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 449abcc5e..6ff60ee50 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -8,7 +8,7 @@ import org.opensearch.migrations.tracing.SimpleMeteringClosure; import lombok.NonNull; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; -import 
org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpRequestHandler; +import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpHandler; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; import javax.net.ssl.SSLEngine; @@ -48,7 +48,7 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpRequestHandler("n", "c", + ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler("n", "c", connectionCaptureFactory, requestCapturePredicate, this::shouldGuaranteeMessageOffloading)); ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); } From a5bfc7d92f6e68a57d3f8c64a71b1a6d6aa48efe Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 15 Dec 2023 10:26:40 -0500 Subject: [PATCH 21/94] Shuffling lots of details of Contexts and the relationships between different levels of them. This has the minimal amount of work to get those relationships to simply compile. Nearly every unit test fails and the code is more clunky than it needs to be, but getting to this point was alone a major lift. Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 2 +- .../migrations/tracing/IWithAttributes.java | 4 +- .../tracing/IWithStartTimeAndAttributes.java | 2 +- .../tracing/IWithTypedEnclosingScope.java | 5 + .../commoncontexts/IConnectionContext.java | 5 +- ...text.java => IHttpTransactionContext.java} | 2 +- .../netty/tracing/HttpMessageContext.java | 5 +- .../migrations/replay/Accumulation.java | 29 +--- .../replay/AccumulationCallbacks.java | 22 ++- ...edTrafficToHttpTransactionAccumulator.java | 89 +++++++--- .../replay/ClientConnectionPool.java | 12 +- .../replay/PacketConsumerFactory.java | 6 +- ...acketToTransformingHttpHandlerFactory.java | 7 +- .../migrations/replay/ReplayEngine.java | 14 +- .../replay/RequestResponsePacketPair.java | 60 ++++++- .../replay/RequestSenderOrchestrator.java | 34 ++-- .../replay/TrafficCaptureSourceFactory.java | 7 +- .../migrations/replay/TrafficReplayer.java | 30 ++-- .../NettyPacketToHttpConsumer.java | 18 +- .../http/HttpJsonTransformingConsumer.java | 27 ++- ...dHttpRequestPreliminaryConvertHandler.java | 15 +- ...ttySendByteBufsToPacketHandlerHandler.java | 11 +- .../http/RequestPipelineOrchestrator.java | 12 +- .../datatypes/ConnectionReplaySession.java | 4 +- .../datatypes/ISourceTrafficChannelKey.java | 6 +- .../replay/datatypes/ITrafficStreamKey.java | 3 + .../datatypes/PojoTrafficStreamKey.java | 8 + .../datatypes/UniqueReplayerRequestKey.java | 4 +- .../kafka/KafkaTrafficCaptureSource.java | 38 +++-- .../replay/kafka/TrackingKafkaConsumer.java | 40 +++-- .../TrafficStreamKeyWithKafkaRecordId.java | 23 ++- .../tracing/AbstractNestedSpanContext.java | 39 +++++ .../replay/tracing/ChannelContextManager.java | 39 +++++ .../replay/tracing/ChannelKeyContext.java | 35 ++-- .../migrations/replay/tracing/Contexts.java | 159 ++++++++++++++++++ .../tracing/DirectNestedSpanContext.java | 18 ++ .../replay/tracing/IChannelKeyContext.java | 18 ++ .../migrations/replay/tracing/IContexts.java | 79 +++++++++ .../tracing/IReplayerRequestContext.java | 17 -- .../tracing/IndirectNestedSpanContext.java | 13 ++ .../replay/tracing/RequestContext.java | 55 ------ .../replay/FullTrafficReplayerTest.java | 7 +- .../replay/RequestSenderOrchestratorTest.java | 2 +- .../replay/ResultsToLogsConsumerTest.java | 8 +- ...afficToHttpTransactionAccumulatorTest.java | 27 +-- 
.../replay/TrafficReplayerTest.java | 45 ++--- .../NettyPacketToHttpConsumerTest.java | 4 - .../kafka/KafkaTrafficCaptureSourceTest.java | 5 +- .../migrations/replay/TestRequestKey.java | 14 +- .../TestTrafficStreamsLifecycleContext.java | 33 ++++ 50 files changed, 821 insertions(+), 340 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/{IRequestContext.java => IHttpTransactionContext.java} (87%) create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java create mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 140cfb601..5308902fd 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -13,7 +13,7 @@ import java.time.Instant; @AllArgsConstructor -public class KafkaRecordContext implements IWithAttributes, IWithStartTime { +public class KafkaRecordContext implements IWithAttributes, IWithStartTime { static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); static final AttributeKey RECORD_SIZE_ATTR = AttributeKey.longKey("recordSize"); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java index 6f4f244b0..8b2f971ca 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java @@ -6,8 +6,8 @@ import java.util.ArrayList; -public interface IWithAttributes { - T getEnclosingScope(); +public interface 
IWithAttributes { + IWithAttributes getEnclosingScope(); Span getCurrentSpan(); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 31be5b08c..0f490b6c8 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -1,4 +1,4 @@ package org.opensearch.migrations.tracing; -public interface IWithStartTimeAndAttributes extends IWithStartTime, IWithAttributes { +public interface IWithStartTimeAndAttributes extends IWithStartTime, IWithAttributes { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java new file mode 100644 index 000000000..4cad5302a --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java @@ -0,0 +1,5 @@ +package org.opensearch.migrations.tracing; + +public interface IWithTypedEnclosingScope { + T getLogicalEnclosingScope(); +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 83476b9fa..bd3781b22 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -2,10 +2,9 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import org.opensearch.migrations.tracing.EmptyContext; import org.opensearch.migrations.tracing.IWithAttributes; -public interface IConnectionContext extends IWithAttributes { +public interface IConnectionContext extends IWithAttributes { static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); static final AttributeKey NODE_ID_ATTR = AttributeKey.stringKey("nodeId"); @@ -13,7 +12,7 @@ public interface IConnectionContext extends IWithAttributes { String getNodeId(); @Override - default EmptyContext getEnclosingScope() { return EmptyContext.singleton; } + default IWithAttributes getEnclosingScope() { return null; } @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IRequestContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java similarity index 87% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IRequestContext.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index c6b932551..0b610a262 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IRequestContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ 
-4,7 +4,7 @@ import io.opentelemetry.api.common.AttributesBuilder; import org.opensearch.migrations.tracing.IWithAttributes; -public interface IRequestContext extends IWithAttributes { +public interface IHttpTransactionContext extends IWithAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); long getSourceRequestIndex(); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index e406c4225..1920ee567 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -1,17 +1,16 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; import io.opentelemetry.api.trace.Span; -import lombok.EqualsAndHashCode; import lombok.Getter; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.commoncontexts.IRequestContext; +import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.time.Instant; -public class HttpMessageContext implements IRequestContext, IWithStartTimeAndAttributes { +public class HttpMessageContext implements IHttpTransactionContext, IWithStartTimeAndAttributes { public enum HttpTransactionState { REQUEST, INTERNALLY_BLOCKED, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index ca239a472..34b5c9a92 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -1,12 +1,12 @@ package org.opensearch.migrations.replay; import lombok.NonNull; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.time.Instant; @@ -27,8 +27,7 @@ enum State { ACCUMULATING_WRITES } - public final ISourceTrafficChannelKey trafficChannelKey; - public final ChannelKeyContext channelContext; + public final ITrafficStreamKey trafficChannelKey; private RequestResponsePacketPair rrPair; AtomicLong newestPacketTimestampInMillis; State state; @@ -52,8 +51,6 @@ public Accumulation(@NonNull ITrafficStreamKey trafficChannelKey, this.startingSourceRequestIndex = startingSourceRequestIndex; this.state = dropObservationsLeftoverFromPrevious ? 
State.IGNORING_LAST_REQUEST : State.WAITING_FOR_NEXT_READ_CHUNK; - channelContext = new ChannelKeyContext(trafficChannelKey, - METERING_CLOSURE.makeSpanContinuation("processingChannel", null)); } public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey forTrafficStreamKey) { @@ -61,19 +58,11 @@ public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey fo return rrPair; } this.rrPair = new RequestResponsePacketPair(forTrafficStreamKey, - new RequestContext(channelContext, getRequestKey(forTrafficStreamKey), - METERING_CLOSURE.makeSpanContinuation("accumulatingRequest"))); + startingSourceRequestIndex, getIndexOfCurrentRequest()); + //this.rrPair.getRequestContext() return rrPair; } - public UniqueReplayerRequestKey getRequestKey() { - return getRequestKey(getRrPair().getBeginningTrafficStreamKey()); - } - - private UniqueReplayerRequestKey getRequestKey(@NonNull ITrafficStreamKey tsk) { - return new UniqueReplayerRequestKey(tsk, startingSourceRequestIndex, getIndexOfCurrentRequest()); - } - public boolean hasSignaledRequests() { return numberOfResets.get() > 0 || state == Accumulation.State.ACCUMULATING_WRITES; } @@ -92,14 +81,6 @@ public boolean hasRrPair() { return rrPair; } - public void rotateRequestGatheringToResponse() { - var ctx = rrPair.requestContext; - ctx.getCurrentSpan().end(); - rrPair.requestContext = new RequestContext(ctx.getEnclosingScope(), - ctx.getReplayerRequestKey(), - METERING_CLOSURE.makeSpanContinuation("accumulatingResponse")); - } - public Instant getLastTimestamp() { return Instant.ofEpochMilli(newestPacketTimestampInMillis.get()); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index 28d988f3d..c46924521 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -4,21 +4,27 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import java.time.Instant; import java.util.List; public interface AccumulationCallbacks { - void onRequestReceived(@NonNull UniqueReplayerRequestKey key, RequestContext ctx, + void onRequestReceived(@NonNull UniqueReplayerRequestKey key, + IContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request); - void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, RequestContext ctx, + void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, + IContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrpp); - void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, ChannelKeyContext ctx, + void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, + IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); - void onConnectionClose(@NonNull 
ISourceTrafficChannelKey key, int channelInteractionNumber, ChannelKeyContext ctx, - RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, + void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, + IChannelKeyContext ctx, + RequestResponsePacketPair.ReconstructionStatus status, + @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); - void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ChannelKeyContext ctx); + void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IChannelKeyContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index b78a72231..04eb1a7a8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -1,11 +1,14 @@ package org.opensearch.migrations.replay; +import lombok.AllArgsConstructor; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -52,7 +55,7 @@ public class CapturedTrafficToHttpTransactionAccumulator { public static final Duration EXPIRATION_GRANULARITY = Duration.ofSeconds(1); private final ExpiringTrafficStreamMap liveStreams; - private final AccumulationCallbacks listener; + private final SpanWrappingAccumulationCallbacks listener; private final AtomicInteger requestCounter = new AtomicInteger(); private final AtomicInteger reusedKeepAliveCounter = new AtomicInteger(); @@ -93,9 +96,52 @@ public void onExpireAccumulation(String partitionId, Accumulation accumulation) RequestResponsePacketPair.ReconstructionStatus.EXPIRED_PREMATURELY); } }); - this.listener = accumulationCallbacks; + this.listener = new SpanWrappingAccumulationCallbacks(accumulationCallbacks); } + @AllArgsConstructor + private static class SpanWrappingAccumulationCallbacks { + private final AccumulationCallbacks underlying; + public void onRequestReceived(@NonNull Accumulation accum, + IContexts.IRequestAccumulationContext requestCtx, + @NonNull HttpMessageAndTimestamp request) { + requestCtx.getCurrentSpan().end(); + underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), + requestCtx.getLogicalEnclosingScope(), request); + } + + public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, + @NonNull RequestResponsePacketPair rrpp) { + var responseCtx = rrpp.getResponseContext(); + responseCtx.getCurrentSpan().end(); + underlying.onFullDataReceived(key, responseCtx.getLogicalEnclosingScope(), rrpp); + } + + public void onConnectionClose(@NonNull Accumulation accum, + 
RequestResponsePacketPair.ReconstructionStatus status, + @NonNull Instant when, + @NonNull List trafficStreamKeysBeingHeld) { + var tsCtx = accum.trafficChannelKey.getTrafficStreamsContext(); + tsCtx.getCurrentSpan().end(); + underlying.onConnectionClose(accum.trafficChannelKey, + accum.startingSourceRequestIndex + accum.startingSourceRequestIndex, + tsCtx, status, when, trafficStreamKeysBeingHeld); + } + + public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, + IContexts.ITrafficStreamsLifecycleContext tsCtx, + @NonNull List trafficStreamKeysBeingHeld) { + tsCtx.getCurrentSpan().end(); + underlying.onTrafficStreamsExpired(status, tsCtx.getLogicalEnclosingScope(), trafficStreamKeysBeingHeld); + } + + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) { + var tsCtx = tsk.getTrafficStreamsContext(); + tsCtx.getCurrentSpan().end(); + underlying.onTrafficStreamIgnored(tsk, tsk.getTrafficStreamsContext()); + } + }; + public int numberOfConnectionsCreated() { return liveStreams.numberOfConnectionsCreated(); } public int numberOfRequestsOnReusedConnections() { return reusedKeepAliveCounter.get(); } public int numberOfConnectionsClosed() { return closedConnectionCounter.get(); } @@ -145,7 +191,7 @@ public void accept(ITrafficStreamWithKey trafficStreamAndKey) { assert accum.state == Accumulation.State.WAITING_FOR_NEXT_READ_CHUNK || accum.state == Accumulation.State.IGNORING_LAST_REQUEST || trafficStream.getSubStreamCount() == 0; - listener.onTrafficStreamIgnored(tsk, accum.channelContext); + listener.onTrafficStreamIgnored(tsk); } } @@ -223,9 +269,7 @@ private static List getTrafficStreamsHeldByAccum(Accumulation accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); closedConnectionCounter.incrementAndGet(); - accum.channelContext.getCurrentSpan().end(); - listener.onConnectionClose(accum.trafficChannelKey, accum.getIndexOfCurrentRequest(), accum.channelContext, - RequestResponsePacketPair.ReconstructionStatus.COMPLETE, + listener.onConnectionClose(accum, RequestResponsePacketPair.ReconstructionStatus.COMPLETE, timestamp, getTrafficStreamsHeldByAccum(accum)); return Optional.of(CONNECTION_STATUS.CLOSED); } else if (observation.hasConnectionException()) { @@ -268,6 +312,7 @@ private Optional handleObservationForReadState(@NonNull Accum rrPair.requestData = new HttpMessageAndTimestamp.Request(timestamp); requestCounter.incrementAndGet(); } + rrPair.addRequestData(timestamp, observation.getRead().getData().toByteArray()); rrPair.requestData.addSegment(observation.getReadSegment().getData().toByteArray()); log.atTrace().setMessage(()->"Added request segment for accum[" + connectionId + "]=" + accum).log(); } else if (observation.hasSegmentEnd()) { @@ -322,8 +367,7 @@ private Optional handleObservationForWriteState(Accumulation private void handleDroppedRequestForAccumulation(Accumulation accum) { if (accum.hasRrPair()) { var rrPair = accum.getRrPair(); - rrPair.getTrafficStreamsHeld().forEach(ts-> - listener.onTrafficStreamIgnored(ts, rrPair.requestContext.getEnclosingScope())); + rrPair.getTrafficStreamsHeld().forEach(tsk->listener.onTrafficStreamIgnored(tsk)); } log.atTrace().setMessage(()->"resetting to forget "+ accum.trafficChannelKey).log(); accum.resetToIgnoreAndForgetCurrentRequest(); @@ -361,25 +405,29 @@ private boolean handleEndOfRequest(Accumulation accumulation) { var rrPair = accumulation.getRrPair(); var requestPacketBytes = 
rrPair.requestData; metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) - .setAttribute(MetricsAttributeKey.REQUEST_ID, accumulation.getRequestKey().toString()) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); + .setAttribute(MetricsAttributeKey.REQUEST_ID, + rrPair.getRequestContext().getLogicalEnclosingScope().getReplayerRequestKey().toString()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, + rrPair.getRequestContext().getLogicalEnclosingScope().getLogicalEnclosingScope().getConnectionId()).emit(); assert (requestPacketBytes != null); assert (!requestPacketBytes.hasInProgressSegment()); - accumulation.rotateRequestGatheringToResponse(); - listener.onRequestReceived(accumulation.getRequestKey(), rrPair.requestContext, requestPacketBytes); + var requestCtx = rrPair.getRequestContext(); + rrPair.rotateRequestGatheringToResponse(); + listener.onRequestReceived(accumulation, requestCtx, requestPacketBytes); accumulation.state = Accumulation.State.ACCUMULATING_WRITES; return true; } private void handleEndOfResponse(Accumulation accumulation, RequestResponsePacketPair.ReconstructionStatus status) { assert accumulation.state == Accumulation.State.ACCUMULATING_WRITES; - metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) - .setAttribute(MetricsAttributeKey.REQUEST_ID, accumulation.getRequestKey().toString()) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, accumulation.getRequestKey().getTrafficStreamKey().getConnectionId()).emit(); var rrPair = accumulation.getRrPair(); + var requestKey = rrPair.getRequestContext().getLogicalEnclosingScope().getReplayerRequestKey(); + metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) + .setAttribute(MetricsAttributeKey.REQUEST_ID, requestKey.toString()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, + requestKey.getTrafficStreamKey().getConnectionId()).emit(); rrPair.completionStatus = status; - rrPair.requestContext.getCurrentSpan().end(); - listener.onFullDataReceived(accumulation.getRequestKey(), rrPair.requestContext, rrPair); + listener.onFullDataReceived(requestKey, rrPair); log.atTrace().setMessage("resetting for end of response").log(); accumulation.resetForNextRequest(); } @@ -410,7 +458,8 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, "for " + accumulation.trafficChannelKey + " assuming an empty server interaction and NOT " + "reproducing this to the target cluster.").log(); if (accumulation.hasRrPair()) { - listener.onTrafficStreamsExpired(status, accumulation.channelContext, + listener.onTrafficStreamsExpired(status, + accumulation.trafficChannelKey.getTrafficStreamsContext(), Collections.unmodifiableList(accumulation.getRrPair().trafficStreamKeysBeingHeld)); } return; @@ -425,9 +474,7 @@ private void fireAccumulationsCallbacksAndClose(Accumulation accumulation, } } finally { if (accumulation.hasSignaledRequests()) { - accumulation.channelContext.getCurrentSpan().end(); - listener.onConnectionClose(accumulation.trafficChannelKey, accumulation.getIndexOfCurrentRequest(), - accumulation.channelContext, status, accumulation.getLastTimestamp(), + listener.onConnectionClose(accumulation, status, accumulation.getLastTimestamp(), getTrafficStreamsHeldByAccum(accumulation)); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index e1eaebac6..a90775368 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -14,12 +14,14 @@ import io.opentelemetry.context.ContextKey; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import java.net.URI; import java.util.concurrent.CompletableFuture; @@ -66,7 +68,7 @@ public ConnectionReplaySession load(final String s) { } private DiagnosticTrackableCompletableFuture - getResilientClientChannelProducer(EventLoop eventLoop, ChannelKeyContext connectionContext) { + getResilientClientChannelProducer(EventLoop eventLoop, IChannelKeyContext connectionContext) { return new AdaptiveRateLimiter() .get(() -> { var clientConnectionChannelCreatedFuture = @@ -140,9 +142,9 @@ public void closeConnection(String connId) { } public Future - submitEventualSessionGet(ChannelKeyContext channelKey, boolean ignoreIfNotPresent, ChannelKeyContext ctx) { + submitEventualSessionGet(IChannelKeyContext ctx, boolean ignoreIfNotPresent) { ConnectionReplaySession channelFutureAndSchedule = - getCachedSession(channelKey, ignoreIfNotPresent); + getCachedSession(ctx, ignoreIfNotPresent); if (channelFutureAndSchedule == null) { var rval = new DefaultPromise(eventLoopGroup.next()); rval.setSuccess(null); @@ -158,7 +160,7 @@ public void closeConnection(String connId) { } @SneakyThrows - public ConnectionReplaySession getCachedSession(ChannelKeyContext channelKey, boolean dontCreate) { + public ConnectionReplaySession getCachedSession(IChannelKeyContext channelKey, boolean dontCreate) { var crs = dontCreate ? 
connectionId2ChannelCache.getIfPresent(channelKey.getConnectionId()) : connectionId2ChannelCache.get(channelKey.getConnectionId()); if (crs != null) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java index e87ac3492..ffec2ad7b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java @@ -2,8 +2,10 @@ import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; public interface PacketConsumerFactory { - IPacketFinalizingConsumer create(UniqueReplayerRequestKey requestKey, RequestContext context); + IPacketFinalizingConsumer create(UniqueReplayerRequestKey requestKey, + IContexts.IReplayerHttpTransactionContext context); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index 005c7896e..6dba100bb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -7,7 +7,8 @@ import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -27,9 +28,9 @@ public PacketToTransformingHttpHandlerFactory(IJsonTransformer jsonTransformer, @Override public IPacketFinalizingConsumer> - create(UniqueReplayerRequestKey requestKey, RequestContext requestContext) { + create(UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext httpTransactionContext) { log.trace("creating HttpJsonTransformingConsumer"); return new HttpJsonTransformingConsumer<>(jsonTransformer, authTransformerFactory, - new TransformedPacketReceiver(), requestContext); + new TransformedPacketReceiver(), httpTransactionContext); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index c80fdabfa..e718e7db9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -9,8 +9,9 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import 
org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; @@ -124,19 +125,20 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - scheduleTransformationWork(RequestContext requestCtx, Instant originalStart, + scheduleTransformationWork(IContexts.IReplayerHttpTransactionContext requestCtx, Instant originalStart, Supplier> task) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "processing"; var start = timeShifter.transformSourceTimeToRealTime(originalStart); logStartOfWork(requestCtx, newCount, start, label); - var result = networkSendOrchestrator.scheduleWork(requestCtx.getEnclosingScope(), + var result = networkSendOrchestrator.scheduleWork(requestCtx, start.minus(EXPECTED_TRANSFORMATION_DURATION), task); return hookWorkFinishingUpdates(result, originalStart, requestCtx, label); } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, RequestContext ctx, Instant originalStart, Instant originalEnd, + scheduleRequest(UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx, + Instant originalStart, Instant originalEnd, int numPackets, Stream packets) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "request"; @@ -154,7 +156,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public void closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - ChannelKeyContext ctx, Instant timestamp) { + IChannelKeyContext ctx, Instant timestamp) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 352c2cda1..cd33c7760 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -3,8 +3,13 @@ import com.google.common.base.Objects; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import 
java.nio.charset.StandardCharsets; import java.time.Instant; @@ -23,20 +28,61 @@ public enum ReconstructionStatus { HttpMessageAndTimestamp requestData; HttpMessageAndTimestamp responseData; - @NonNull final ITrafficStreamKey firstTrafficStreamKeyForRequest; + @NonNull final ISourceTrafficChannelKey firstTrafficStreamKeyForRequest; List trafficStreamKeysBeingHeld; ReconstructionStatus completionStatus; - RequestContext requestContext; + // switch between RequestAccumulation/ResponseAccumulation objects when we're parsing, + // or just leave this null, in which case, the context from the trafficStreamKey should be used + private IWithAttributes requestOrResponseAccumulationContext; - public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, RequestContext requestContext) { - firstTrafficStreamKeyForRequest = startingAtTrafficStreamKey; - this.requestContext = requestContext; + public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, + int startingSourceRequestIndex, int indexOfCurrentRequest) { + this.firstTrafficStreamKeyForRequest = startingAtTrafficStreamKey; + var requestKey = new UniqueReplayerRequestKey(startingAtTrafficStreamKey, + startingSourceRequestIndex, indexOfCurrentRequest); + var httpTransactionContext = new Contexts.HttpTransactionContext( + startingAtTrafficStreamKey.getTrafficStreamsContext(), + requestKey, + Accumulation.METERING_CLOSURE.makeSpanContinuation("processingChannel")); + requestOrResponseAccumulationContext = new Contexts.RequestAccumulationContext(httpTransactionContext, + Accumulation.METERING_CLOSURE.makeSpanContinuation("httpTransactionAccumulation")); } - @NonNull ITrafficStreamKey getBeginningTrafficStreamKey() { + @NonNull ISourceTrafficChannelKey getBeginningTrafficStreamKey() { return firstTrafficStreamKeyForRequest; } + public IContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { + var looseCtx = requestOrResponseAccumulationContext; + // the req/response ctx types in the assert below will always implement this with the + // IReplayerHttpTransactionContext parameter, but this seems clearer + // than trying to engineer a compile time static check + assert looseCtx instanceof IWithTypedEnclosingScope; + assert looseCtx instanceof IContexts.IRequestAccumulationContext + || looseCtx instanceof IContexts.IResponseAccumulationContext; + return ((IWithTypedEnclosingScope) looseCtx) + .getLogicalEnclosingScope(); + + } + + public @NonNull IContexts.IRequestAccumulationContext getRequestContext() { + return (IContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; + } + + public @NonNull IContexts.IResponseAccumulationContext getResponseContext() { + return (IContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; + } + + public void rotateRequestGatheringToResponse() { + var looseCtx = requestOrResponseAccumulationContext; + assert looseCtx instanceof IContexts.IRequestAccumulationContext; + var ctx = (IContexts.IRequestAccumulationContext) looseCtx; + var parentCtx = ctx.getLogicalEnclosingScope(); + ctx.getCurrentSpan().end(); + requestOrResponseAccumulationContext = new Contexts.ResponseAccumulationContext(parentCtx, + Accumulation.METERING_CLOSURE.makeSpanContinuation("accumulatingResponse")); + } + public void addRequestData(Instant packetTimeStamp, byte[] data) { if (log.isTraceEnabled()) { log.trace(this + " Adding request data: " + new String(data, StandardCharsets.UTF_8)); diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index 037670ab1..3eb70c35a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -7,12 +7,12 @@ import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ChannelTaskType; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.ChannelTask; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -38,9 +38,9 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { } public DiagnosticTrackableCompletableFuture - scheduleWork(ChannelKeyContext ctx, Instant timestamp, + scheduleWork(IContexts.IReplayerHttpTransactionContext ctx, Instant timestamp, Supplier> task) { - var connectionSession = clientConnectionPool.getCachedSession(ctx, false); + var connectionSession = clientConnectionPool.getCachedSession(ctx.getChannelKeyContext(), false); var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); @@ -64,19 +64,20 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, RequestContext ctx, + scheduleRequest(UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx, Instant start, Duration interval, Stream packets) { var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final aggregated response"); log.atDebug().setMessage(()->"Scheduling request for "+requestKey+" at start time "+start).log(); return asynchronouslyInvokeRunnableToSetupFuture( - ctx.getEnclosingScope(), requestKey.getReplayerRequestIndex(), false, finalTunneledResponse, + ctx, requestKey.getReplayerRequestIndex(), false, finalTunneledResponse, channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(ctx, channelFutureAndRequestSchedule, finalTunneledResponse, start, interval, packets)); } - public StringTrackableCompletableFuture scheduleClose(ChannelKeyContext ctx, int channelInteractionNum, + public StringTrackableCompletableFuture scheduleClose(IChannelKeyContext ctx, + int channelInteractionNum, Instant timestamp) { var channelKey = ctx.getChannelKey(); var channelInteraction = new IndexedChannelInteraction(channelKey, channelInteractionNum); @@ -98,12 +99,12 @@ public 
StringTrackableCompletableFuture scheduleClose(ChannelKeyContext ct } private DiagnosticTrackableCompletableFuture - asynchronouslyInvokeRunnableToSetupFuture(ChannelKeyContext ctx, int channelInteractionNumber, + asynchronouslyInvokeRunnableToSetupFuture(IChannelKeyContext ctx, int channelInteractionNumber, boolean ignoreIfChannelNotPresent, DiagnosticTrackableCompletableFuture finalTunneledResponse, Consumer successFn) { var channelFutureAndScheduleFuture = - clientConnectionPool.submitEventualSessionGet(ctx, ignoreIfChannelNotPresent, ctx); + clientConnectionPool.submitEventualSessionGet(ctx, ignoreIfChannelNotPresent); channelFutureAndScheduleFuture.addListener(submitFuture->{ if (!submitFuture.isSuccess()) { log.atError().setCause(submitFuture.cause()) @@ -149,11 +150,12 @@ public StringTrackableCompletableFuture scheduleClose(ChannelKeyContext ct return finalTunneledResponse; } - private void scheduleOnConnectionReplaySession(ChannelKeyContext ctx, int channelInteractionIdx, + private void scheduleOnConnectionReplaySession(IChannelKeyContext ctx, int channelInteractionIdx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture futureToBeCompletedByTask, Instant atTime, ChannelTask task) { - var channelInteraction = new IndexedChannelInteraction(ctx.getChannelKey(), channelInteractionIdx); + var channelInteraction = new IndexedChannelInteraction(ctx.getChannelKey(), + channelInteractionIdx); log.atInfo().setMessage(()->channelInteraction + " scheduling " + task.kind + " at " + atTime).log(); var schedule = channelFutureAndRequestSchedule.schedule; @@ -197,7 +199,7 @@ private void scheduleOnConnectionReplaySession(ChannelKeyContext ctx, int ch }), ()->""); } - private void scheduleSendOnConnectionReplaySession(RequestContext ctx, + private void scheduleSendOnConnectionReplaySession(IContexts.IReplayerHttpTransactionContext ctx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture responseFuture, Instant start, Duration interval, Stream packets) { @@ -207,7 +209,7 @@ private void scheduleSendOnConnectionReplaySession(RequestContext ctx, getPacketReceiver(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); - scheduleOnConnectionReplaySession(ctx.getEnclosingScope(), + scheduleOnConnectionReplaySession(ctx.getLogicalEnclosingScope(), ctx.getReplayerRequestKey().getSourceRequestIndex(), channelFutureAndRequestSchedule, responseFuture, start, new ChannelTask(ChannelTaskType.TRANSMIT, packetSender)); @@ -237,10 +239,10 @@ private long getDelayFromNowMs(Instant to) { } private static NettyPacketToHttpConsumer - getPacketReceiver(RequestContext requestContext, ChannelFuture channelFuture, + getPacketReceiver(IContexts.IReplayerHttpTransactionContext httpTransactionContext, ChannelFuture channelFuture, AtomicReference packetReceiver) { if (packetReceiver.get() == null) { - packetReceiver.set(new NettyPacketToHttpConsumer(channelFuture, requestContext)); + packetReceiver.set(new NettyPacketToHttpConsumer(channelFuture, httpTransactionContext)); } return packetReceiver.get(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java index 93d1bb664..cb41231bf 100644 --- 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java @@ -3,6 +3,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.kafka.KafkaBehavioralPolicy; import org.opensearch.migrations.replay.kafka.KafkaTrafficCaptureSource; +import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; @@ -32,8 +33,10 @@ private TrafficCaptureSourceFactory() {} } if (isKafkaActive) { - return KafkaTrafficCaptureSource.buildKafkaConsumer(appParams.kafkaTrafficBrokers, appParams.kafkaTrafficTopic, - appParams.kafkaTrafficGroupId, appParams.kafkaTrafficEnableMSKAuth, appParams.kafkaTrafficPropertyFile, + return KafkaTrafficCaptureSource.buildKafkaSource( + appParams.kafkaTrafficBrokers, appParams.kafkaTrafficTopic, + appParams.kafkaTrafficGroupId, appParams.kafkaTrafficEnableMSKAuth, + appParams.kafkaTrafficPropertyFile, Clock.systemUTC(), new KafkaBehavioralPolicy()); } else if (isInputFileActive) { return new InputStreamOfTraffic(new FileInputStream(appParams.inputFilename)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index e59ef545d..b0ee9707e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -14,10 +14,11 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; @@ -602,8 +603,8 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { private ITrafficCaptureSource trafficCaptureSource; @Override - public void onRequestReceived(UniqueReplayerRequestKey requestKey, RequestContext ctx, - HttpMessageAndTimestamp request) { + public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx, + @NonNull HttpMessageAndTimestamp request) { replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); liveTrafficStreamLimiter.addWork(1); @@ -620,7 +621,8 @@ public void onRequestReceived(UniqueReplayerRequestKey requestKey, RequestContex } @Override - public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, RequestContext ctx, + public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, + 
IContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrPair) { log.atInfo().setMessage(()->"Done receiving captured stream for " + requestKey + ":" + rrPair.requestData).log(); @@ -636,7 +638,8 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, Req } } - Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, RequestResponsePacketPair rrPair, + Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, + RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { try { // if this comes in with a serious Throwable (not an Exception), don't bother @@ -677,7 +680,8 @@ Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, Re @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ChannelKeyContext ctx, List trafficStreamKeysBeingHeld) { + IChannelKeyContext ctx, + @NonNull List trafficStreamKeysBeingHeld) { commitTrafficStreams(trafficStreamKeysBeingHeld, status); } @@ -698,16 +702,16 @@ private void commitTrafficStreams(List trafficStreamKeysBeing } @Override - public void onConnectionClose(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, - Instant timestamp, List trafficStreamKeysBeingHeld) { + public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, int channelInteractionNum, + IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + @NonNull Instant timestamp, @NonNull List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); commitTrafficStreams(trafficStreamKeysBeingHeld, status); } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, ChannelKeyContext ctx) { + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IChannelKeyContext ctx) { commitTrafficStreams(List.of(tsk), true); } @@ -861,7 +865,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture transformAndSendRequest(ReplayEngine replayEngine, HttpMessageAndTimestamp request, - UniqueReplayerRequestKey requestKey, RequestContext ctx) { + UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx) { return transformAndSendRequest(inputRequestTransformerFactory, replayEngine, ctx, request.getFirstPacketTimestamp(), request.getLastPacketTimestamp(), requestKey, request.packetBytes::stream); @@ -869,7 +873,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture transformAndSendRequest(PacketToTransformingHttpHandlerFactory inputRequestTransformerFactory, - ReplayEngine replayEngine, RequestContext ctx, + ReplayEngine replayEngine, IContexts.IReplayerHttpTransactionContext ctx, @NonNull Instant start, @NonNull Instant end, UniqueReplayerRequestKey requestKey, Supplier> packetsSupplier) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 361688ced..5d68e0326 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -18,17 +18,17 @@ import io.netty.handler.logging.LoggingHandler; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslHandler; -import io.opentelemetry.context.ContextKey; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -58,15 +58,15 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - RequestContext tracingContext; + IContexts.IReplayerHttpTransactionContext tracingContext; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, - RequestContext requestContext) { - this(createClientConnection(eventLoopGroup, sslContext, serverUri, requestContext.getEnclosingScope()), - requestContext); + Contexts.HttpTransactionContext httpTransactionContext) { + this(createClientConnection(eventLoopGroup, sslContext, serverUri, + httpTransactionContext.getLogicalEnclosingScope()), httpTransactionContext); } - public NettyPacketToHttpConsumer(ChannelFuture clientConnection, RequestContext ctx) { + public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IContexts.IReplayerHttpTransactionContext ctx) { this.tracingContext = ctx; responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = @@ -91,7 +91,7 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, RequestContext } public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, SslContext sslContext, - URI serverUri, ChannelKeyContext channelKeyContext) { + URI serverUri, IChannelKeyContext channelKeyContext) { String host = serverUri.getHost(); int port = serverUri.getPort(); log.atTrace().setMessage(()->"Active - setting up backend connection to " + host + ":" + port).log(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 2076a5529..89bb5c6e2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -3,27 +3,24 @@ import io.netty.buffer.ByteBuf; import 
io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.http.HttpRequestDecoder; -import io.opentelemetry.api.trace.Span; -import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; -import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; import org.slf4j.event.Level; import java.nio.charset.StandardCharsets; -import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Optional; @@ -58,7 +55,7 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume private final RequestPipelineOrchestrator pipelineOrchestrator; private final EmbeddedChannel channel; private static final MetricsLogger metricsLogger = new MetricsLogger("HttpJsonTransformingConsumer"); - private IWithStartTimeAndAttributes transformationContext; + private Contexts.RequestTransformationContext transformationContext; /** * Roughly try to keep track of how big each data chunk was that came into the transformer. 
These values @@ -75,19 +72,15 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume public HttpJsonTransformingConsumer(IJsonTransformer transformer, IAuthTransformerFactory authTransformerFactory, IPacketFinalizingConsumer transformedPacketReceiver, - RequestContext requestContext) { - this.transformationContext = new IWithStartTimeAndAttributes<>() { - @Getter Span currentSpan = METERING_CLOSURE.makeSpanContinuation("httpRequestTransformation") - .apply(requestContext.getPopulatedAttributes(), requestContext.getCurrentSpan()); - @Getter Instant startTime = Instant.now(); - @Override public RequestContext getEnclosingScope() { return requestContext; } - }; + IContexts.IReplayerHttpTransactionContext httpTransactionContext) { + transformationContext = new Contexts.RequestTransformationContext(httpTransactionContext, + METERING_CLOSURE.makeSpanContinuation("transformation")); chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); channel = new EmbeddedChannel(); pipelineOrchestrator = new RequestPipelineOrchestrator<>(chunkSizes, transformedPacketReceiver, - authTransformerFactory, requestContext); + authTransformerFactory, httpTransactionContext); pipelineOrchestrator.addInitialHandlers(channel.pipeline(), transformer); } @@ -159,14 +152,14 @@ public DiagnosticTrackableCompletableFuture"transformedHttpMessageValue"); } @@ -193,7 +186,7 @@ private static Throwable unwindPossibleCompletionException(Throwable t) { ()->"HttpJsonTransformingConsumer.redriveWithoutTransformation.compose()"); metricsLogger.atError(MetricsEvent.REQUEST_REDRIVEN_WITHOUT_TRANSFORMATION, reason) .setAttribute(MetricsAttributeKey.REQUEST_ID, transformationContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, transformationContext.getEnclosingScope().getConnectionId()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, transformationContext.getLogicalEnclosingScope().getConnectionId()) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); return finalizedFuture.map(f->f.thenApply(r->reason == null ? 
new TransformedOutputAndResult(r, HttpRequestTransformationStatus.SKIPPED, null) : diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 2957a7a70..2ee09e821 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -10,7 +10,8 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datahandlers.PayloadAccessFaultingMap; import org.opensearch.migrations.replay.datahandlers.PayloadNotLoadedException; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IJsonTransformer; @@ -27,18 +28,18 @@ public class NettyDecodedHttpRequestPreliminaryConvertHandler extends Channel final IJsonTransformer transformer; final List> chunkSizes; final String diagnosticLabel; - private RequestContext requestContext; + private IContexts.IReplayerHttpTransactionContext httpTransactionContext; static final MetricsLogger metricsLogger = new MetricsLogger("NettyDecodedHttpRequestPreliminaryConvertHandler"); public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, - RequestContext requestContext) { + IContexts.IReplayerHttpTransactionContext httpTransactionContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; - this.diagnosticLabel = "[" + requestContext + "] "; - this.requestContext = requestContext; + this.diagnosticLabel = "[" + httpTransactionContext + "] "; + this.httpTransactionContext = httpTransactionContext; } @Override @@ -54,8 +55,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception .append(request.protocolVersion().text()) .toString()); metricsLogger.atSuccess(MetricsEvent.CAPTURED_REQUEST_PARSED_TO_HTTP) - .setAttribute(MetricsAttributeKey.REQUEST_ID, requestContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, requestContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, httpTransactionContext) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, httpTransactionContext.getConnectionId()) .setAttribute(MetricsAttributeKey.HTTP_METHOD, request.method()) .setAttribute(MetricsAttributeKey.HTTP_ENDPOINT, request.uri()).emit(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java index e46763082..641f4d916 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java @@ -8,7 +8,8 @@ import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -29,15 +30,15 @@ public class NettySendByteBufsToPacketHandlerHandler extends ChannelInboundHa DiagnosticTrackableCompletableFuture currentFuture; private AtomicReference>> packetReceiverCompletionFutureRef; - RequestContext requestContext; + IContexts.IReplayerHttpTransactionContext httpTransactionContext; public NettySendByteBufsToPacketHandlerHandler(IPacketFinalizingConsumer packetReceiver, - RequestContext requestContext) { + IContexts.IReplayerHttpTransactionContext httpTransactionContext) { this.packetReceiver = packetReceiver; this.packetReceiverCompletionFutureRef = new AtomicReference<>(); - this.requestContext = requestContext; + this.httpTransactionContext = httpTransactionContext; currentFuture = DiagnosticTrackableCompletableFuture.Factory.completedFuture(null, - ()->"currentFuture for NettySendByteBufsToPacketHandlerHandler initialized to the base case for " + requestContext); + ()->"currentFuture for NettySendByteBufsToPacketHandlerHandler initialized to the base case for " + httpTransactionContext); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 36c72e28b..e3590f94b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -10,7 +10,7 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -42,19 +42,19 @@ public class RequestPipelineOrchestrator { public static final String HTTP_REQUEST_DECODER_NAME = "HTTP_REQUEST_DECODER"; private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; - private RequestContext requestContext; + private IContexts.IReplayerHttpTransactionContext httpTransactionContext; @Getter final IAuthTransformerFactory authTransfomerFactory; public RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, IAuthTransformerFactory incomingAuthTransformerFactory, - RequestContext requestContext) { + IContexts.IReplayerHttpTransactionContext httpTransactionContext) { this.chunkSizes = chunkSizes; this.packetReceiver = 
packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : IAuthTransformerFactory.NullAuthTransformerFactory.instance; - this.requestContext = requestContext; + this.httpTransactionContext = httpTransactionContext; } static void removeThisAndPreviousHandlers(ChannelPipeline pipeline, ChannelHandler targetHandler) { @@ -97,7 +97,7 @@ void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) // HttpRequestDecoder when the HttpRequestDecoder is removed from the pipeline BEFORE the // NettyDecodedHttpRequestHandler is removed. pipeline.addLast(new NettyDecodedHttpRequestPreliminaryConvertHandler(transformer, chunkSizes, - this, requestContext)); + this, httpTransactionContext)); addLoggingHandler(pipeline, "B"); } @@ -147,7 +147,7 @@ void addBaselineHandlers(ChannelPipeline pipeline) { // OUT: nothing - terminal! ByteBufs are routed to the packet handler! addLoggingHandler(pipeline, "K"); pipeline.addLast(OFFLOADING_HANDLER_NAME, - new NettySendByteBufsToPacketHandlerHandler(packetReceiver, requestContext)); + new NettySendByteBufsToPacketHandlerHandler(packetReceiver, httpTransactionContext)); } private void addLoggingHandler(ChannelPipeline pipeline, String name) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java index 617767a4d..ccccf7ee4 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java @@ -7,9 +7,9 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.tracing.ChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.OnlineRadixSorter; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; /** * This class contains everything that is needed to replay packets to a specific channel. 
@@ -35,7 +35,7 @@ public class ConnectionReplaySession { @Getter @Setter - private ChannelKeyContext channelContext; + private IChannelKeyContext channelContext; public ConnectionReplaySession(EventLoop eventLoop) { this.eventLoop = eventLoop; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java index 3a0eb4644..b398ec088 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java @@ -1,7 +1,11 @@ package org.opensearch.migrations.replay.datatypes; +import lombok.NonNull; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; + public interface ISourceTrafficChannelKey { String getNodeId(); - String getConnectionId(); + @NonNull IContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java index ab9d6ced4..8ca33f1cd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java @@ -1,5 +1,8 @@ package org.opensearch.migrations.replay.datatypes; +import lombok.NonNull; +import org.opensearch.migrations.replay.tracing.IContexts; + public interface ITrafficStreamKey extends ISourceTrafficChannelKey { int getTrafficStreamIndex(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java index 564b6dbf5..fb931f674 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java @@ -3,6 +3,10 @@ import java.util.StringJoiner; import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NonNull; +import lombok.Setter; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -11,6 +15,10 @@ public class PojoTrafficStreamKey implements ITrafficStreamKey { private final String nodeId; private final String connectionId; private final int trafficStreamIndex; + @Getter + @Setter + @NonNull + IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; public PojoTrafficStreamKey(TrafficStream stream) { this(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java index d4cc37b5f..54e116855 100644 --- 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java @@ -4,11 +4,11 @@ @EqualsAndHashCode(callSuper = true) public class UniqueReplayerRequestKey extends UniqueSourceRequestKey { - public final ISourceTrafficChannelKey trafficStreamKey; + public final ITrafficStreamKey trafficStreamKey; public final int sourceRequestIndexOffsetAtStartOfAccumulation; public final int replayerRequestIndex; - public UniqueReplayerRequestKey(ISourceTrafficChannelKey streamKey, int sourceOffsetAtStartOfAccumulation, + public UniqueReplayerRequestKey(ITrafficStreamKey streamKey, int sourceOffsetAtStartOfAccumulation, int replayerIndex) { this.trafficStreamKey = streamKey; this.sourceRequestIndexOffsetAtStartOfAccumulation = sourceOffsetAtStartOfAccumulation; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 267b29fb4..f15c5c4a9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -12,8 +12,10 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamWithKey; +import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.FileInputStream; @@ -32,8 +34,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; /** * Adapt a Kafka stream into a TrafficCaptureSource. 
@@ -64,19 +64,26 @@ @Slf4j public class KafkaTrafficCaptureSource implements ISimpleTrafficCaptureSource { + public static final String TELEMETRY_SCOPE_NAME = "KafkaSource"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + public static final String MAX_POLL_INTERVAL_KEY = "max.poll.interval.ms"; // see https://stackoverflow.com/questions/39730126/difference-between-session-timeout-ms-and-max-poll-interval-ms-for-kafka-0-10 public static final String DEFAULT_POLL_INTERVAL_MS = "60000"; private static final MetricsLogger metricsLogger = new MetricsLogger("KafkaProtobufConsumer"); + final TrackingKafkaConsumer trackingKafkaConsumer; private final ExecutorService kafkaExecutor; private final AtomicLong trafficStreamsRead; private final KafkaBehavioralPolicy behavioralPolicy; + private final ChannelContextManager channelContextManager; - public KafkaTrafficCaptureSource(Consumer kafkaConsumer, String topic, Duration keepAliveInterval) { - this(kafkaConsumer, topic, keepAliveInterval, Clock.systemUTC(), new KafkaBehavioralPolicy()); + public KafkaTrafficCaptureSource(Consumer kafkaConsumer, + String topic, Duration keepAliveInterval) { + this(kafkaConsumer, topic, keepAliveInterval, + Clock.systemUTC(), new KafkaBehavioralPolicy()); } public KafkaTrafficCaptureSource(Consumer kafkaConsumer, @@ -85,20 +92,22 @@ public KafkaTrafficCaptureSource(Consumer kafkaConsumer, Clock clock, @NonNull KafkaBehavioralPolicy behavioralPolicy) { - trackingKafkaConsumer = new TrackingKafkaConsumer(kafkaConsumer, topic, keepAliveInterval, clock); + this.channelContextManager = new ChannelContextManager(); + trackingKafkaConsumer = new TrackingKafkaConsumer(kafkaConsumer, topic, keepAliveInterval, clock, + tskList->tskList.forEach(channelContextManager::releaseContextFor)); trafficStreamsRead = new AtomicLong(); this.behavioralPolicy = behavioralPolicy; kafkaConsumer.subscribe(Collections.singleton(topic), trackingKafkaConsumer); kafkaExecutor = Executors.newSingleThreadExecutor(); } - public static KafkaTrafficCaptureSource buildKafkaConsumer(@NonNull String brokers, - @NonNull String topic, - @NonNull String groupId, - boolean enableMSKAuth, - String propertyFilePath, - @NonNull Clock clock, - @NonNull KafkaBehavioralPolicy behavioralPolicy) + public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull String brokers, + @NonNull String topic, + @NonNull String groupId, + boolean enableMSKAuth, + String propertyFilePath, + @NonNull Clock clock, + @NonNull KafkaBehavioralPolicy behavioralPolicy) throws IOException { var kafkaProps = buildKafkaProperties(brokers, groupId, enableMSKAuth, propertyFilePath); @@ -152,7 +161,7 @@ public static Properties buildKafkaProperties(@NonNull String brokers, @Override @SneakyThrows public void touch() { - CompletableFuture.runAsync(()->trackingKafkaConsumer.touch(), kafkaExecutor).get(); + CompletableFuture.runAsync(trackingKafkaConsumer::touch, kafkaExecutor).get(); } /** @@ -190,7 +199,8 @@ public List readNextTrafficStreamSynchronously() { var trafficStreamsSoFar = trafficStreamsRead.incrementAndGet(); log.atTrace().setMessage(()->"Parsed traffic stream #" + trafficStreamsSoFar + ": " + offsetData + " " + ts).log(); - var key = new TrafficStreamKeyWithKafkaRecordId(ts, offsetData); + var key = new TrafficStreamKeyWithKafkaRecordId( + channelContextManager::retainOrCreateContext, ts, kafkaRecord.key(), offsetData); return (ITrafficStreamWithKey) new PojoTrafficStreamWithKey(ts, key); } catch 
(InvalidProtocolBufferException e) { RuntimeException recordError = behavioralPolicy.onInvalidKafkaRecord(kafkaRecord, e); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 418a544be..6d4cdffa1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -1,6 +1,5 @@ package org.opensearch.migrations.replay.kafka; -import com.google.protobuf.InvalidProtocolBufferException; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; @@ -8,22 +7,18 @@ import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; -import org.apache.kafka.common.requests.TxnOffsetCommitRequest; -import org.opensearch.migrations.coreutils.MetricsAttributeKey; -import org.opensearch.migrations.coreutils.MetricsEvent; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamWithKey; -import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.trafficcapture.protos.TrafficStream; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.slf4j.event.Level; import java.time.Clock; import java.time.Duration; import java.time.Instant; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -65,13 +60,16 @@ public class TrackingKafkaConsumer implements ConsumerRebalanceListener { final Map partitionToOffsetLifecycleTrackerMap; // loosening visibility so that a unit test can read this final Map nextSetOfCommitsMap; + final Map> nextSetOfKeysBeingCommitted; + final java.util.function.Consumer> onCommitKeysCallback; private final Duration keepAliveInterval; private final AtomicReference lastTouchTimeRef; - private AtomicInteger consumerConnectionGeneration; - private AtomicInteger kafkaRecordsLeftToCommit; + private final AtomicInteger consumerConnectionGeneration; + private final AtomicInteger kafkaRecordsLeftToCommit; public TrackingKafkaConsumer(Consumer kafkaConsumer, String topic, - Duration keepAliveInterval, Clock c) { + Duration keepAliveInterval, Clock c, + java.util.function.Consumer> onCommitKeysCallback) { this.kafkaConsumer = kafkaConsumer; this.topic = topic; this.clock = c; @@ -81,13 +79,17 @@ public TrackingKafkaConsumer(Consumer kafkaConsumer, String topi consumerConnectionGeneration = new AtomicInteger(); kafkaRecordsLeftToCommit = new AtomicInteger(); this.keepAliveInterval = keepAliveInterval; + this.nextSetOfKeysBeingCommitted = new HashMap<>(); + this.onCommitKeysCallback = onCommitKeysCallback; } @Override public void onPartitionsRevoked(Collection partitions) { safeCommit(); partitions.forEach(p->{ - nextSetOfCommitsMap.remove(new TopicPartition(topic, p.partition())); + var tp = new TopicPartition(topic, p.partition()); + nextSetOfCommitsMap.remove(tp); + nextSetOfKeysBeingCommitted.remove(tp); 
partitionToOffsetLifecycleTrackerMap.remove(p.partition()); }); kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() @@ -147,11 +149,11 @@ private void pause() { } catch (IllegalStateException e) { log.atError().setCause(e).setMessage(()->"Unable to pause the topic partitions: " + topic + ". " + "The active partitions passed here : " + activePartitions.stream() - .map(x->x.toString()).collect(Collectors.joining(",")) + ". " + + .map(x->""+x).collect(Collectors.joining(",")) + ". " + "The active partitions as tracked here are: " + getActivePartitions().stream() - .map(x->x.toString()).collect(Collectors.joining(",")) + ". " + + .map(x->""+x).collect(Collectors.joining(",")) + ". " + "The active partitions according to the consumer: " + kafkaConsumer.assignment().stream() - .map(x->x.toString()).collect(Collectors.joining(",")) + .map(x->""+x).collect(Collectors.joining(",")) ).log(); } } @@ -165,11 +167,11 @@ private void resume() { "This may not be a fatal error for the entire process as the consumer should eventually" + " rejoin and rebalance. " + "The active partitions passed here : " + activePartitions.stream() - .map(x->x.toString()).collect(Collectors.joining(",")) + ". " + + .map(x->""+x).collect(Collectors.joining(",")) + ". " + "The active partitions as tracked here are: " + getActivePartitions().stream() - .map(x->x.toString()).collect(Collectors.joining(",")) + ". " + + .map(x->""+x).collect(Collectors.joining(",")) + ". " + "The active partitions according to the consumer: " + kafkaConsumer.assignment().stream() - .map(x->x.toString()).collect(Collectors.joining(",")) + .map(x->""+x).collect(Collectors.joining(",")) ).log(); } } @@ -247,6 +249,8 @@ private void safeCommit() { if (!nextSetOfCommitsMap.isEmpty()) { log.atDebug().setMessage(() -> "Committing " + nextSetOfCommitsMap).log(); kafkaConsumer.commitSync(nextSetOfCommitsMap); + onCommitKeysCallback.accept(nextSetOfKeysBeingCommitted.values().stream().flatMap(Collection::stream)); + nextSetOfKeysBeingCommitted.clear(); nextSetOfCommitsMap.clear(); log.trace("partitionToOffsetLifecycleTrackerMap="+partitionToOffsetLifecycleTrackerMap); kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java index 4a19942f5..ee3385974 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java @@ -2,28 +2,43 @@ import lombok.EqualsAndHashCode; import lombok.Getter; -import lombok.ToString; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.util.StringJoiner; +import java.util.function.Function; @EqualsAndHashCode(callSuper = true) @Getter class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKey implements KafkaCommitOffsetData { + public static final 
String TELEMETRY_SCOPE_NAME = "KafkaRecords"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + private final int generation; private final int partition; private final long offset; - TrafficStreamKeyWithKafkaRecordId(TrafficStream trafficStream, KafkaCommitOffsetData ok) { - this(trafficStream, ok.getGeneration(), ok.getPartition(), ok.getOffset()); + TrafficStreamKeyWithKafkaRecordId(Function contextFactory, + TrafficStream trafficStream, String recordId, KafkaCommitOffsetData ok) { + this(contextFactory, trafficStream, recordId, ok.getGeneration(), ok.getPartition(), ok.getOffset()); } - TrafficStreamKeyWithKafkaRecordId(TrafficStream trafficStream, int generation, int partition, long offset) { + TrafficStreamKeyWithKafkaRecordId(Function contextFactory, + TrafficStream trafficStream, String recordId, + int generation, int partition, long offset) { super(trafficStream); this.generation = generation; this.partition = partition; this.offset = offset; + var channelKeyContext = contextFactory.apply(this); + var kafkaContext = new Contexts.KafkaRecordContext(channelKeyContext, recordId, + METERING_CLOSURE.makeSpanContinuation("kafkaRecord")); + this.setTrafficStreamsContext(new Contexts.TrafficStreamsLifecycleContext(kafkaContext, this, + METERING_CLOSURE.makeSpanContinuation("trafficStreamLifecycle"))); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java new file mode 100644 index 000000000..5d6db09c8 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java @@ -0,0 +1,39 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import lombok.NonNull; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithStartTime; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; + +import java.time.Instant; + +public abstract class AbstractNestedSpanContext implements + IWithAttributes, IWithStartTime { + final T enclosingScope; + @Getter final Instant startTime; + @Getter private Span currentSpan; + + public AbstractNestedSpanContext(@NonNull T enclosingScope) { + this.enclosingScope = enclosingScope; + this.startTime = Instant.now(); + } + + @Override + public IWithAttributes getEnclosingScope() { + return enclosingScope; + } + + public T getImmediateEnclosingScope() { return enclosingScope; } + + protected void setCurrentSpan(@NonNull ISpanWithParentGenerator spanGenerator) { + setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan())); + } + + protected void setCurrentSpan(@NonNull Span s) { + assert currentSpan == null : "only expect to set the current span once"; + currentSpan = s; + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java new file mode 100644 index 000000000..eb0b21cbc --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -0,0 +1,39 @@ 
+package org.opensearch.migrations.replay.tracing; + +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.tracing.ISpanGenerator; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; + +import java.util.HashMap; +import java.util.function.Function; + +public class ChannelContextManager implements Function { + public static final String TELEMETRY_SCOPE_NAME = "Channel"; + public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + HashMap connectionToChannelContextMap = new HashMap<>(); + + public ChannelKeyContext apply(ITrafficStreamKey tsk) { + return retainOrCreateContext(tsk); + } + + public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { + return retainOrCreateContext(tsk, METERING_CLOSURE.makeSpanContinuation("channel", null)); + } + + public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk, ISpanGenerator spanGenerator) { + return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), + k-> new ChannelKeyContext(tsk, spanGenerator).retain()); + } + + public ChannelKeyContext releaseContextFor(ITrafficStreamKey tsk) { + var connectionId = tsk.getConnectionId(); + var ctx = connectionToChannelContextMap.get(connectionId); + var finalRelease = ctx.release(); + if (finalRelease) { + ctx.currentSpan.end(); + connectionToChannelContextMap.remove(connectionId); + } + return ctx; + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java index 419793eca..e58303ae9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -2,35 +2,46 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; -import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; +import org.opensearch.migrations.tracing.ISpanGenerator; +import org.opensearch.migrations.tracing.IWithStartTime; -import java.util.StringJoiner; +import java.time.Instant; -public class ChannelKeyContext implements IConnectionContext { +public class ChannelKeyContext implements IChannelKeyContext, IWithStartTime { @Getter final ISourceTrafficChannelKey channelKey; @Getter final Span currentSpan; + @Getter + final Instant startTime; + @Getter + int refCount; public ChannelKeyContext(ISourceTrafficChannelKey channelKey, ISpanGenerator spanGenerator) { this.channelKey = channelKey; this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); + this.startTime = Instant.now(); } @Override - public String getConnectionId() { - return channelKey.getConnectionId(); + public String toString() { + return channelKey.toString(); } - @Override - public String getNodeId() { - return channelKey.getNodeId(); + public ChannelKeyContext retain() { + refCount++; + return this; } - @Override - public String toString() { - return channelKey.toString(); + /** + * Returns true if this was the final release + * + * @return + */ + public boolean release() { + refCount--; + assert refCount >= 0; + return refCount == 0; } 
} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java new file mode 100644 index 000000000..0849576ea --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java @@ -0,0 +1,159 @@ +package org.opensearch.migrations.replay.tracing; + +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; + +public class Contexts { + + private Contexts() {} + + public static class KafkaRecordContext extends DirectNestedSpanContext + implements IContexts.IKafkaRecordContext { + final String recordId; + + public KafkaRecordContext(IChannelKeyContext enclosingScope, String recordId, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + this.recordId = recordId; + setCurrentSpan(spanGenerator); + } + + @Override + public String getRecordId() { + return recordId; + } + } + + public static class TrafficStreamsLifecycleContext + extends IndirectNestedSpanContext + implements IContexts.ITrafficStreamsLifecycleContext { + private final ITrafficStreamKey trafficStreamKey; + + public TrafficStreamsLifecycleContext(IContexts.IKafkaRecordContext enclosingScope, + ITrafficStreamKey trafficStreamKey, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + this.trafficStreamKey = trafficStreamKey; + setCurrentSpan(spanGenerator); + } + + @Override + public IChannelKeyContext getChannelKeyContext() { + return getLogicalEnclosingScope(); + } + + @Override + public ITrafficStreamKey getTrafficStreamKey() { + return trafficStreamKey; + } + + @Override + public IChannelKeyContext getLogicalEnclosingScope() { + return enclosingScope.getLogicalEnclosingScope(); + } + } + + public static class HttpTransactionContext + extends IndirectNestedSpanContext + implements IContexts.IReplayerHttpTransactionContext { + final UniqueReplayerRequestKey replayerRequestKey; + + public HttpTransactionContext(IContexts.ITrafficStreamsLifecycleContext enclosingScope, + UniqueReplayerRequestKey replayerRequestKey, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + this.replayerRequestKey = replayerRequestKey; + setCurrentSpan(spanGenerator); + } + + public IChannelKeyContext getChannelKeyContext() { + return getLogicalEnclosingScope(); + } + + @Override + public UniqueReplayerRequestKey getReplayerRequestKey() { + return replayerRequestKey; + } + + @Override + public String toString() { + return replayerRequestKey.toString(); + } + + @Override + public IChannelKeyContext getLogicalEnclosingScope() { + return enclosingScope.getLogicalEnclosingScope(); + } + } + + public static class RequestAccumulationContext + extends DirectNestedSpanContext + implements IContexts.IRequestAccumulationContext { + public RequestAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class ResponseAccumulationContext + extends DirectNestedSpanContext + implements IContexts.IResponseAccumulationContext { + public ResponseAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); 
+ } + } + + public static class RequestTransformationContext + extends DirectNestedSpanContext + implements IContexts.IRequestTransformationContext { + public RequestTransformationContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class WaitingForHttpResponseContext + extends DirectNestedSpanContext + implements IContexts.IWaitingForHttpResponseContext { + public WaitingForHttpResponseContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class ReceivingHttpResponseContext + extends DirectNestedSpanContext + implements IContexts.IReceivingHttpResponseContext { + public ReceivingHttpResponseContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class RequestSendingContext + extends DirectNestedSpanContext + implements IContexts.IRequestSendingContext { + public RequestSendingContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class TupleHandlingContext + extends DirectNestedSpanContext + implements IContexts.ITupleHandlingContext { + public TupleHandlingContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java new file mode 100644 index 000000000..214665804 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java @@ -0,0 +1,18 @@ +package org.opensearch.migrations.replay.tracing; + +import lombok.NonNull; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; + +public class DirectNestedSpanContext + extends AbstractNestedSpanContext + implements IWithTypedEnclosingScope { + public DirectNestedSpanContext(@NonNull T enclosingScope) { + super(enclosingScope); + } + + @Override + public T getLogicalEnclosingScope() { + return (T) getEnclosingScope(); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java new file mode 100644 index 000000000..0aa5ffa20 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java @@ -0,0 +1,18 @@ +package org.opensearch.migrations.replay.tracing; + +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; + +public interface IChannelKeyContext extends IConnectionContext { + // do not add this as a property + // because its components are already being added in the IConnectionContext implementation + ISourceTrafficChannelKey getChannelKey(); + + default 
String getConnectionId() { + return getChannelKey().getConnectionId(); + } + + default String getNodeId() { + return getChannelKey().getNodeId(); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java new file mode 100644 index 000000000..aecb2ade7 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java @@ -0,0 +1,79 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; +import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; + +public class IContexts { + public static interface IKafkaRecordContext + extends IWithAttributes, IWithTypedEnclosingScope { + static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); + + String getRecordId(); + + default AttributesBuilder fillAttributes(AttributesBuilder builder) { + return IWithAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); + } + } + + public static interface ITrafficStreamsLifecycleContext + extends IChannelKeyContext, IWithTypedEnclosingScope { + ITrafficStreamKey getTrafficStreamKey(); + IChannelKeyContext getChannelKeyContext(); + default ISourceTrafficChannelKey getChannelKey() { + return getChannelKeyContext().getChannelKey(); + } + } + + public static interface IReplayerHttpTransactionContext + extends IHttpTransactionContext, IChannelKeyContext, IWithTypedEnclosingScope { + static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); + + UniqueReplayerRequestKey getReplayerRequestKey(); + IChannelKeyContext getChannelKeyContext(); + + default ISourceTrafficChannelKey getChannelKey() { + return getChannelKeyContext().getChannelKey(); + } + + default long getSourceRequestIndex() { + return getReplayerRequestKey().getSourceRequestIndex(); + } + + default long replayerRequestIndex() { + return getReplayerRequestKey().getReplayerRequestIndex(); + } + + @Override + default AttributesBuilder fillAttributes(AttributesBuilder builder) { + return IHttpTransactionContext.super.fillAttributes( + builder.put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex())); + } + } + + public static interface IRequestAccumulationContext + extends IWithAttributes, IWithTypedEnclosingScope { } + + public static interface IResponseAccumulationContext + extends IWithAttributes, IWithTypedEnclosingScope { } + + public static interface IRequestTransformationContext + extends IWithAttributes, IWithTypedEnclosingScope { } + + public static interface IWaitingForHttpResponseContext + extends IWithAttributes, IWithTypedEnclosingScope { } + + public static interface IReceivingHttpResponseContext + extends IWithAttributes, IWithTypedEnclosingScope { } + + public static interface IRequestSendingContext + extends IWithAttributes, IWithTypedEnclosingScope { } + + public static interface ITupleHandlingContext + extends IWithAttributes, IWithTypedEnclosingScope { } +} diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java deleted file mode 100644 index 3de2e3dc1..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayerRequestContext.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.AttributesBuilder; -import org.opensearch.migrations.tracing.commoncontexts.IRequestContext; - -public interface IReplayerRequestContext extends IRequestContext { - static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); - - long replayerRequestIndex(); - - @Override - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IRequestContext.super.fillAttributes( - builder.put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex())); - } -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java new file mode 100644 index 000000000..6c3f2883d --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java @@ -0,0 +1,13 @@ +package org.opensearch.migrations.replay.tracing; + +import lombok.NonNull; +import org.opensearch.migrations.tracing.IWithAttributes; + +public abstract class IndirectNestedSpanContext + extends AbstractNestedSpanContext { + public IndirectNestedSpanContext(@NonNull D enclosingScope) { + super(enclosingScope); + } + + public abstract L getLogicalEnclosingScope(); +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java deleted file mode 100644 index e90bfdf14..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RequestContext.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import io.opentelemetry.api.trace.Span; -import lombok.Getter; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.EmptyContext; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.IWithAttributes; -import org.opensearch.migrations.tracing.IWithStartTime; - -import java.time.Instant; - -public class RequestContext implements IReplayerRequestContext, IWithStartTime { - @Getter - ChannelKeyContext enclosingScope; - @Getter - final UniqueReplayerRequestKey replayerRequestKey; - @Getter - final Instant startTime; - @Getter - final Span currentSpan; - - public RequestContext(ChannelKeyContext enclosingScope, UniqueReplayerRequestKey replayerRequestKey, - ISpanWithParentGenerator spanGenerator) { - this.enclosingScope = enclosingScope; - this.replayerRequestKey = replayerRequestKey; - this.startTime = Instant.now(); - this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), 
enclosingScope.getCurrentSpan()); - } - - public String getConnectionId() { - return enclosingScope.getConnectionId(); - } - - @Override - public long getSourceRequestIndex() { - return replayerRequestKey.getSourceRequestIndex(); - } - - @Override - public long replayerRequestIndex() { - return replayerRequestKey.getReplayerRequestIndex(); - } - - public ISourceTrafficChannelKey getChannelKey() { - return replayerRequestKey.trafficStreamKey; - } - - @Override - public String toString() { - return replayerRequestKey.toString(); - } -} diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index b82c4b3ef..a7ac23fc8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -12,7 +12,9 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamWithKey; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -179,13 +181,16 @@ private static class TrafficStreamCursorKey implements ITrafficStreamKey, Compar public final String connectionId; public final String nodeId; public final int trafficStreamIndex; - + @Getter public final IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; public TrafficStreamCursorKey(TrafficStream stream, int arrayIndex) { connectionId = stream.getConnectionId(); nodeId = stream.getNodeId(); trafficStreamIndex = TrafficStreamUtils.getTrafficStreamIndex(stream); this.arrayIndex = arrayIndex; + var key = new PojoTrafficStreamKey(connectionId, nodeId, trafficStreamIndex); + this.trafficStreamsContext = new TestTrafficStreamsLifecycleContext(key); + key.setTrafficStreamsContext(this.trafficStreamsContext); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index a8701076d..5a45babea 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -52,7 +52,7 @@ public void testThatSchedulingWorks() throws Exception { } var connectionCtx = TestRequestKey.getTestConnectionRequestContext(NUM_REQUESTS_TO_SCHEDULE); var closeFuture = senderOrchestrator.scheduleClose( - connectionCtx.getEnclosingScope(), NUM_REQUESTS_TO_SCHEDULE, + connectionCtx.getLogicalEnclosingScope(), NUM_REQUESTS_TO_SCHEDULE, lastEndTime.plus(Duration.ofMillis(100))); Assertions.assertEquals(NUM_REQUESTS_TO_SCHEDULE, scheduledItems.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 400407dec..fa52d308e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -3,7 +3,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.netty.buffer.Unpooled; -import io.opentelemetry.api.GlobalOpenTelemetry; import lombok.extern.slf4j.Slf4j; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; @@ -20,9 +19,7 @@ import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; -import org.opensearch.migrations.tracing.EmptyContext; import java.io.IOException; import java.io.InputStream; @@ -232,8 +229,9 @@ public void testOutputterForPost() throws IOException { @Test private void testOutputterForRequest(String requestResourceName, String expected) throws IOException { var trafficStreamKey = new PojoTrafficStreamKey(NODE_ID,"c",0); - var sourcePair = new RequestResponsePacketPair(trafficStreamKey, - TestRequestKey.getTestConnectionRequestContext(0)); + var requestCtx = TestRequestKey.getTestConnectionRequestContext(0); + trafficStreamKey.setTrafficStreamsContext(requestCtx.getImmediateEnclosingScope()); + var sourcePair = new RequestResponsePacketPair(trafficStreamKey, 0, 0); var rawRequestData = loadResourceAsBytes("/requests/raw/" + requestResourceName); sourcePair.addRequestData(Instant.EPOCH, rawRequestData); var rawResponseData = NettyPacketToHttpConsumerTest.EXPECTED_RESPONSE_STRING.getBytes(StandardCharsets.UTF_8); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index b7a73eab4..1e05fdb7e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -2,7 +2,6 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; -import io.opentelemetry.api.GlobalOpenTelemetry; import io.vavr.Tuple2; import lombok.AllArgsConstructor; import lombok.NonNull; @@ -15,8 +14,9 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; import 
org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; @@ -31,7 +31,6 @@ import java.util.Arrays; import java.util.List; import java.util.SortedSet; -import java.util.StringJoiner; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -218,13 +217,15 @@ void generateAndTest(String testName, int bufferSize, int skipCount, new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey key, RequestContext ctx, + public void onRequestReceived(UniqueReplayerRequestKey key, + IContexts.IReplayerHttpTransactionContext ctx, HttpMessageAndTimestamp request) { requestsReceived.incrementAndGet(); } @Override - public void onFullDataReceived(UniqueReplayerRequestKey requestKey, RequestContext ctx, + public void onFullDataReceived(UniqueReplayerRequestKey requestKey, + IContexts.IReplayerHttpTransactionContext ctx, RequestResponsePacketPair fullPair) { var sourceIdx = requestKey.getSourceRequestIndex(); if (fullPair.completionStatus == @@ -245,19 +246,19 @@ public void onFullDataReceived(UniqueReplayerRequestKey requestKey, RequestConte @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ChannelKeyContext ctx, - List trafficStreamKeysBeingHeld) {} + IChannelKeyContext ctx, + @NonNull List trafficStreamKeysBeingHeld) {} @Override - public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - ChannelKeyContext ctx, + public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, + IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, - Instant when, - List trafficStreamKeysBeingHeld) { + @NonNull Instant when, + @NonNull List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - ChannelKeyContext ctx) { + IChannelKeyContext ctx) { tsIndicesReceived.add(tsk.getTrafficStreamIndex()); } }); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index a7fdfda40..684e870d9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -9,8 +9,9 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; +import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; @@ -156,7 +157,8 @@ public void testReader() throws Exception { new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), 
null, new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey id, RequestContext ctx, + public void onRequestReceived(UniqueReplayerRequestKey id, + IContexts.IReplayerHttpTransactionContext ctx, HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); @@ -164,7 +166,8 @@ public void onRequestReceived(UniqueReplayerRequestKey id, RequestContext ctx, } @Override - public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, + public void onFullDataReceived(UniqueReplayerRequestKey key, + IContexts.IReplayerHttpTransactionContext ctx, RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); @@ -172,19 +175,19 @@ public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ChannelKeyContext ctx, - List trafficStreamKeysBeingHeld) {} + IChannelKeyContext ctx, + @NonNull List trafficStreamKeysBeingHeld) {} @Override - public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - ChannelKeyContext ctx, + public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, + IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, - Instant when, - List trafficStreamKeysBeingHeld) { + @NonNull Instant when, + @NonNull List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - ChannelKeyContext ctx) {} + IChannelKeyContext ctx) {} }); var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); @@ -208,7 +211,8 @@ public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { "CapturedTrafficToHttpTransactionAccumulator that's being used in this unit test!", new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey id, RequestContext ctx, + public void onRequestReceived(UniqueReplayerRequestKey id, + IContexts.IReplayerHttpTransactionContext ctx, HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); @@ -216,7 +220,8 @@ public void onRequestReceived(UniqueReplayerRequestKey id, RequestContext ctx, } @Override - public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, + public void onFullDataReceived(UniqueReplayerRequestKey key, + IContexts.IReplayerHttpTransactionContext ctx, RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); @@ -224,17 +229,17 @@ public void onFullDataReceived(UniqueReplayerRequestKey key, RequestContext ctx, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - ChannelKeyContext ctx, - List trafficStreamKeysBeingHeld) {} + IChannelKeyContext ctx, + @NonNull List trafficStreamKeysBeingHeld) {} @Override - public void onConnectionClose(ISourceTrafficChannelKey key, int channelInteractionNumber, - ChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, - Instant when, - List trafficStreamKeysBeingHeld) { + public void onConnectionClose(@NonNull 
ISourceTrafficChannelKey key, int channelInteractionNumber, + IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + @NonNull Instant when, + @NonNull List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - ChannelKeyContext ctx) {} + IChannelKeyContext ctx) {} } ); byte[] serializedChunks; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index 05d27763c..bf7ecbc81 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -4,7 +4,6 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import io.netty.util.concurrent.DefaultThreadFactory; -import io.opentelemetry.api.GlobalOpenTelemetry; import lombok.Lombok; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.AfterAll; @@ -21,9 +20,6 @@ import org.opensearch.migrations.replay.TimeShifter; import org.opensearch.migrations.replay.TrafficReplayer; import org.opensearch.migrations.replay.TransformationLoader; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.RequestContext; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.testutils.HttpFirstLine; import org.opensearch.migrations.testutils.PortFinder; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index 6cec1a13b..3632106f4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -10,7 +10,7 @@ import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; @@ -43,7 +43,8 @@ public void testRecordToString() { .setNodeId("n") .setNumber(7) .build(); - var tsk = new TrafficStreamKeyWithKafkaRecordId(ts, 1, 2, 123); + var contextFactory = new ChannelContextManager(); + var tsk = new TrafficStreamKeyWithKafkaRecordId(contextFactory, ts, "testRecord", 1, 2, 123); Assertions.assertEquals("n.c.7|partition=2|offset=123", tsk.toString()); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index a9b232ae4..0f8505ff0 100644 
--- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,28 +1,28 @@ package org.opensearch.migrations.replay; +import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.RequestContext; public class TestRequestKey { public static final String TEST_NODE_ID = "testNodeId"; public static final String DEFAULT_TEST_CONNECTION = "testConnection"; + private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("test"); + private TestRequestKey() {} - public static final RequestContext getTestConnectionRequestContext(int replayerIdx) { + public static final Contexts.HttpTransactionContext getTestConnectionRequestContext(int replayerIdx) { return getTestConnectionRequestContext(DEFAULT_TEST_CONNECTION, replayerIdx); } - public static final RequestContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { + public static Contexts.HttpTransactionContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { var rk = new UniqueReplayerRequestKey( new PojoTrafficStreamKey(TEST_NODE_ID, connectionId, 0), 0, replayerIdx); - var smc = new SimpleMeteringClosure("test"); - var channelKeyContext = new ChannelKeyContext(rk.trafficStreamKey, smc.makeSpanContinuation("test", null)); - return new RequestContext(channelKeyContext, rk, smc.makeSpanContinuation("test2")); + var tsCtx = new TestTrafficStreamsLifecycleContext(rk.trafficStreamKey); + return new Contexts.HttpTransactionContext(tsCtx, rk, METERING_CLOSURE.makeSpanContinuation("test2")); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java new file mode 100644 index 000000000..ddea5a489 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -0,0 +1,33 @@ +package org.opensearch.migrations.replay; + +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; +import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; + +class TestTrafficStreamsLifecycleContext + extends DirectNestedSpanContext + implements IContexts.ITrafficStreamsLifecycleContext { + private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("test"); + + private final ITrafficStreamKey trafficStreamKey; + + public TestTrafficStreamsLifecycleContext(ITrafficStreamKey tsk) { + super(new ChannelKeyContext(tsk, METERING_CLOSURE.makeSpanContinuation("channel", null))); + this.trafficStreamKey = tsk; + 
setCurrentSpan(METERING_CLOSURE.makeSpanContinuation("stream")); + } + + @Override + public IChannelKeyContext getChannelKeyContext() { + return getLogicalEnclosingScope(); + } + + @Override + public ITrafficStreamKey getTrafficStreamKey() { + return trafficStreamKey; + } +} From 3d601066b3a37ff47a3f1878d0c3fb948e32fb50 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 15 Dec 2023 13:05:53 -0500 Subject: [PATCH 22/94] Lots of refactoring to get a couple more test cases to pass. Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 4 +- .../migrations/tracing/EmptyContext.java | 4 +- ... => IScopedInstrumentationAttributes.java} | 6 +-- .../tracing/ISpanWithParentGenerator.java | 2 +- .../tracing/IWithStartTimeAndAttributes.java | 2 +- .../tracing/SimpleMeteringClosure.java | 16 +++---- .../commoncontexts/IConnectionContext.java | 6 +-- .../IHttpTransactionContext.java | 4 +- .../migrations/replay/Accumulation.java | 4 -- .../replay/RequestResponsePacketPair.java | 4 +- ...hKey.java => PojoTrafficStreamAndKey.java} | 2 +- .../datatypes/PojoTrafficStreamKey.java | 33 +++----------- .../PojoTrafficStreamKeyAndContext.java | 44 +++++++++++++++++++ .../kafka/KafkaTrafficCaptureSource.java | 4 +- .../TrafficStreamKeyWithKafkaRecordId.java | 4 +- .../tracing/AbstractNestedSpanContext.java | 9 ++-- .../tracing/DirectNestedSpanContext.java | 4 +- .../migrations/replay/tracing/IContexts.java | 20 ++++----- .../tracing/IndirectNestedSpanContext.java | 4 +- .../traffic/source/InputStreamOfTraffic.java | 35 ++++++++++++++- .../source/TrafficStreamWithEmbeddedKey.java | 23 ---------- .../util/TrafficChannelKeyFormatter.java | 24 ++++++++++ .../replay/BlockingTrafficSourceTest.java | 26 ++++++----- .../CompressedFileTrafficCaptureSource.java | 6 +-- ...xpiringTrafficStreamMapSequentialTest.java | 11 +++-- ...ExpiringTrafficStreamMapUnorderedTest.java | 12 +++-- .../replay/FullTrafficReplayerTest.java | 13 +++--- .../KafkaRestartingTrafficReplayerTest.java | 11 ++--- .../replay/ParsedHttpMessagesAsDictsTest.java | 9 ++-- .../replay/ResultsToLogsConsumerTest.java | 11 +++-- ...afficToHttpTransactionAccumulatorTest.java | 9 ++-- .../replay/V0_1TrafficCaptureSource.java | 10 ++--- .../KafkaCommitsWorkBetweenLongPolls.java | 7 +-- .../replay/kafka/KafkaTestUtils.java | 12 +++-- .../migrations/replay/TestRequestKey.java | 9 ++-- 35 files changed, 233 insertions(+), 171 deletions(-) rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/{IWithAttributes.java => IScopedInstrumentationAttributes.java} (84%) rename TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/{PojoTrafficStreamWithKey.java => PojoTrafficStreamAndKey.java} (83%) create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/TrafficStreamWithEmbeddedKey.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/TrafficChannelKeyFormatter.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 5308902fd..e0edb8b2a 100644 --- 
a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -7,13 +7,13 @@ import lombok.Getter; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Instant; @AllArgsConstructor -public class KafkaRecordContext implements IWithAttributes, IWithStartTime { +public class KafkaRecordContext implements IScopedInstrumentationAttributes, IWithStartTime { static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); static final AttributeKey RECORD_SIZE_ATTR = AttributeKey.longKey("recordSize"); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java index d44a356c3..97cc56cbf 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java @@ -3,7 +3,7 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; -public class EmptyContext implements IWithAttributes { +public class EmptyContext implements IScopedInstrumentationAttributes { public static final EmptyContext singleton = new EmptyContext(); private EmptyContext() {} @@ -14,7 +14,7 @@ public Span getCurrentSpan() { } @Override - public IWithAttributes getEnclosingScope() { + public IScopedInstrumentationAttributes getEnclosingScope() { return null; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java similarity index 84% rename from TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 8b2f971ca..5ef7b129a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -6,8 +6,8 @@ import java.util.ArrayList; -public interface IWithAttributes { - IWithAttributes getEnclosingScope(); +public interface IScopedInstrumentationAttributes { + IScopedInstrumentationAttributes getEnclosingScope(); Span getCurrentSpan(); @@ -21,7 +21,7 @@ default Attributes getPopulatedAttributes() { default AttributesBuilder getPopulatedAttributesBuilder() { var currentObj = this; - var stack = new ArrayList(); + var stack = new ArrayList(); var builder = Attributes.builder(); while (currentObj != null) { stack.add(currentObj); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java index bdd4dc066..e9180fde7 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java @@ -5,5 +5,5 @@ import java.util.function.BiFunction; -public interface ISpanWithParentGenerator extends BiFunction { +public interface ISpanWithParentGenerator extends BiFunction { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 0f490b6c8..1c6986815 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -1,4 +1,4 @@ package org.opensearch.migrations.tracing; -public interface IWithStartTimeAndAttributes extends IWithStartTime, IWithAttributes { +public interface IWithStartTimeAndAttributes extends IWithStartTime, IScopedInstrumentationAttributes { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java index 3714739c7..d16018af1 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java @@ -81,11 +81,11 @@ public static void initializeOpenTelemetry(String serviceName, String collectorE //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); } - public void meterIncrementEvent(IWithAttributes ctx, String eventName) { + public void meterIncrementEvent(IScopedInstrumentationAttributes ctx, String eventName) { meterIncrementEvent(ctx, eventName, 1); } - public void meterIncrementEvent(IWithAttributes ctx, String eventName, long increment) { + public void meterIncrementEvent(IScopedInstrumentationAttributes ctx, String eventName, long increment) { if (ctx == null) { return; } @@ -95,7 +95,7 @@ public void meterIncrementEvent(IWithAttributes ctx, String eventName, long incr .build()); } - public void meterDeltaEvent(IWithAttributes ctx, String eventName, long delta) { + public void meterDeltaEvent(IScopedInstrumentationAttributes ctx, String eventName, long delta) { if (ctx == null) { return; } @@ -105,23 +105,23 @@ public void meterDeltaEvent(IWithAttributes ctx, String eventName, long delta) { .build()); } - public void meterHistogramMillis(T ctx, String eventName) { + public void meterHistogramMillis(T ctx, String eventName) { meterHistogram(ctx, eventName, "ms", Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); } - public void meterHistogramMicros(T ctx, String eventName) { + public void meterHistogramMicros(T ctx, String eventName) { meterHistogram(ctx, eventName, "us", Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); } - public void meterHistogramMillis(IWithAttributes ctx, String eventName, Duration between) { + public void meterHistogramMillis(IScopedInstrumentationAttributes ctx, String eventName, Duration between) { meterHistogram(ctx, eventName, "ms", between.toMillis()); } - 
public void meterHistogramMicros(IWithAttributes ctx, String eventName, Duration between) { + public void meterHistogramMicros(IScopedInstrumentationAttributes ctx, String eventName, Duration between) { meterHistogram(ctx, eventName, "us", between.toNanos()*1000); } - public void meterHistogram(IWithAttributes ctx, String eventName, String units, long value) { + public void meterHistogram(IScopedInstrumentationAttributes ctx, String eventName, String units, long value) { if (ctx == null) { return; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index bd3781b22..74be9003a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -2,9 +2,9 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IConnectionContext extends IWithAttributes { +public interface IConnectionContext extends IScopedInstrumentationAttributes { static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); static final AttributeKey NODE_ID_ATTR = AttributeKey.stringKey("nodeId"); @@ -12,7 +12,7 @@ public interface IConnectionContext extends IWithAttributes { String getNodeId(); @Override - default IWithAttributes getEnclosingScope() { return null; } + default IScopedInstrumentationAttributes getEnclosingScope() { return null; } @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index 0b610a262..bebcee137 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -2,9 +2,9 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IHttpTransactionContext extends IWithAttributes { +public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); long getSourceRequestIndex(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index 34b5c9a92..325b4efed 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -1,12 +1,8 @@ package org.opensearch.migrations.replay; import lombok.NonNull; -import 
org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.tracing.IWithAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.time.Instant; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index cd33c7760..389636af6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -8,7 +8,7 @@ import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IContexts; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import java.nio.charset.StandardCharsets; @@ -33,7 +33,7 @@ public enum ReconstructionStatus { ReconstructionStatus completionStatus; // switch between RequestAccumulation/ResponseAccumulation objects when we're parsing, // or just leave this null, in which case, the context from the trafficStreamKey should be used - private IWithAttributes requestOrResponseAccumulationContext; + private IScopedInstrumentationAttributes requestOrResponseAccumulationContext; public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, int startingSourceRequestIndex, int indexOfCurrentRequest) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamWithKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamAndKey.java similarity index 83% rename from TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamWithKey.java rename to TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamAndKey.java index 62837c9eb..4387f46a6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamWithKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamAndKey.java @@ -8,7 +8,7 @@ @AllArgsConstructor @Getter -public class PojoTrafficStreamWithKey implements ITrafficStreamWithKey { +public class PojoTrafficStreamAndKey implements ITrafficStreamWithKey { public final TrafficStream stream; public final ITrafficStreamKey key; } \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java index fb931f674..efbe294a5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java @@ -1,30 +1,15 @@ package org.opensearch.migrations.replay.datatypes; -import java.util.StringJoiner; - import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.NonNull; -import lombok.Setter; -import org.opensearch.migrations.replay.tracing.IContexts; -import org.opensearch.migrations.trafficcapture.protos.TrafficStream; -import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; +import org.opensearch.migrations.replay.util.TrafficChannelKeyFormatter; @EqualsAndHashCode() -public class PojoTrafficStreamKey implements ITrafficStreamKey { - private final String nodeId; - private final String connectionId; - private final int trafficStreamIndex; - @Getter - @Setter - @NonNull - IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; - - public PojoTrafficStreamKey(TrafficStream stream) { - this(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); - } +abstract class PojoTrafficStreamKey implements ITrafficStreamKey { + protected final String nodeId; + protected final String connectionId; + protected final int trafficStreamIndex; - public PojoTrafficStreamKey(String nodeId, String connectionId, int index) { + protected PojoTrafficStreamKey(String nodeId, String connectionId, int index) { this.nodeId = nodeId; this.connectionId = connectionId; this.trafficStreamIndex = index; @@ -47,10 +32,6 @@ public int getTrafficStreamIndex() { @Override public String toString() { - return new StringJoiner(".") - .add(nodeId) - .add(connectionId) - .add(""+trafficStreamIndex) - .toString(); + return TrafficChannelKeyFormatter.format(nodeId, connectionId); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java new file mode 100644 index 000000000..fbf837e4c --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java @@ -0,0 +1,44 @@ +package org.opensearch.migrations.replay.datatypes; + +import java.util.StringJoiner; +import java.util.function.Function; + +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NonNull; +import lombok.Setter; +import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.trafficcapture.protos.TrafficStream; +import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; + +@EqualsAndHashCode(callSuper = true) +public class PojoTrafficStreamKeyAndContext extends PojoTrafficStreamKey { + @Getter + @Setter + @NonNull + IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; + + public static PojoTrafficStreamKeyAndContext build(TrafficStream stream, Function contextSupplier) { + var rval = new PojoTrafficStreamKeyAndContext(stream.getNodeId(), stream.getConnectionId(), + TrafficStreamUtils.getTrafficStreamIndex(stream)); + rval.setTrafficStreamsContext(contextSupplier.apply(rval)); + return rval; + } + + protected PojoTrafficStreamKeyAndContext(TrafficStream stream) { + this(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); + } + + public static PojoTrafficStreamKeyAndContext build(String nodeId, String connectionId, int index, Function contextSupplier) { + var rval = new 
PojoTrafficStreamKeyAndContext(nodeId, connectionId, index); + rval.setTrafficStreamsContext(contextSupplier.apply(rval)); + return rval; + } + + protected PojoTrafficStreamKeyAndContext(String nodeId, String connectionId, int index) { + super(nodeId, connectionId, index); + } + +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index f15c5c4a9..57b001591 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -11,7 +11,7 @@ import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamWithKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -201,7 +201,7 @@ public List readNextTrafficStreamSynchronously() { ": " + offsetData + " " + ts).log(); var key = new TrafficStreamKeyWithKafkaRecordId( channelContextManager::retainOrCreateContext, ts, kafkaRecord.key(), offsetData); - return (ITrafficStreamWithKey) new PojoTrafficStreamWithKey(ts, key); + return (ITrafficStreamWithKey) new PojoTrafficStreamAndKey(ts, key); } catch (InvalidProtocolBufferException e) { RuntimeException recordError = behavioralPolicy.onInvalidKafkaRecord(kafkaRecord, e); metricsLogger.atError(MetricsEvent.PARSING_TRAFFIC_STREAM_FROM_KAFKA_FAILED, recordError) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java index ee3385974..d7f0d2da3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java @@ -3,7 +3,7 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.tracing.SimpleMeteringClosure; @@ -14,7 +14,7 @@ @EqualsAndHashCode(callSuper = true) @Getter -class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKey implements KafkaCommitOffsetData { +class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext implements KafkaCommitOffsetData { public static final String TELEMETRY_SCOPE_NAME = "KafkaRecords"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java index 5d6db09c8..bbb7f611b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java @@ -4,14 +4,13 @@ import lombok.Getter; import lombok.NonNull; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithStartTime; -import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import java.time.Instant; -public abstract class AbstractNestedSpanContext implements - IWithAttributes, IWithStartTime { +public abstract class AbstractNestedSpanContext implements + IScopedInstrumentationAttributes, IWithStartTime { final T enclosingScope; @Getter final Instant startTime; @Getter private Span currentSpan; @@ -22,7 +21,7 @@ public AbstractNestedSpanContext(@NonNull T enclosingScope) { } @Override - public IWithAttributes getEnclosingScope() { + public IScopedInstrumentationAttributes getEnclosingScope() { return enclosingScope; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java index 214665804..66eb863f1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java @@ -1,10 +1,10 @@ package org.opensearch.migrations.replay.tracing; import lombok.NonNull; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; -public class DirectNestedSpanContext +public class DirectNestedSpanContext extends AbstractNestedSpanContext implements IWithTypedEnclosingScope { public DirectNestedSpanContext(@NonNull T enclosingScope) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java index aecb2ade7..47b4ad8bc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java @@ -5,19 +5,19 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; public class IContexts { public static interface IKafkaRecordContext - extends 
IWithAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); String getRecordId(); default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IWithAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); + return IScopedInstrumentationAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); } } @@ -57,23 +57,23 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } public static interface IRequestAccumulationContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public static interface IResponseAccumulationContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public static interface IRequestTransformationContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public static interface IWaitingForHttpResponseContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public static interface IReceivingHttpResponseContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public static interface IRequestSendingContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public static interface ITupleHandlingContext - extends IWithAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java index 6c3f2883d..1c8664c3b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java @@ -1,9 +1,9 @@ package org.opensearch.migrations.replay.tracing; import lombok.NonNull; -import org.opensearch.migrations.tracing.IWithAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public abstract class IndirectNestedSpanContext +public abstract class IndirectNestedSpanContext extends AbstractNestedSpanContext { public IndirectNestedSpanContext(@NonNull D enclosingScope) { super(enclosingScope); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 4f9cd53c5..5daf74a5b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -1,8 +1,17 @@ package org.opensearch.migrations.replay.traffic.source; +import lombok.Getter; import lombok.Lombok; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import 
org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.EOFException; @@ -14,13 +23,33 @@ @Slf4j public class InputStreamOfTraffic implements ISimpleTrafficCaptureSource { + private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("InputStreamOfTraffic"); private final InputStream inputStream; private final AtomicInteger trafficStreamsRead = new AtomicInteger(); + private final ChannelContextManager channelContextManager; public InputStreamOfTraffic(InputStream inputStream) { + this.channelContextManager = new ChannelContextManager(); this.inputStream = inputStream; } + private static class IOSTrafficStreamContext + extends DirectNestedSpanContext + implements IContexts.ITrafficStreamsLifecycleContext { + @Getter private final ITrafficStreamKey trafficStreamKey; + + public IOSTrafficStreamContext(@NonNull IChannelKeyContext ctx, ITrafficStreamKey tsk) { + super(ctx); + this.trafficStreamKey = tsk; + setCurrentSpan(METERING_CLOSURE.makeSpanContinuation("trafficStreamLifecycle")); + } + + @Override + public IChannelKeyContext getChannelKeyContext() { + return getImmediateEnclosingScope(); + } + } + /** * Returns a CompletableFuture to a TrafficStream object or sets the cause exception to an * EOFException if the input has been exhausted. 
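A minimal caller-side sketch of the contract the javadoc above describes (illustrative only; ReadLoopSketch and drainAll are placeholder names, and it assumes readNextTrafficStreamChunk() takes no arguments, returns a CompletableFuture<List<ITrafficStreamWithKey>>, and completes exceptionally with an EOFException once the input is exhausted, which is what the hunk below implements):

import java.io.EOFException;
import java.util.concurrent.CompletionException;

import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic;

class ReadLoopSketch {
    // Drain the source until the EOFException completion described in the javadoc above.
    static void drainAll(InputStreamOfTraffic source) {
        while (true) {
            try {
                // join() rethrows the future's failure wrapped in a CompletionException
                var chunk = source.readNextTrafficStreamChunk().join();
                chunk.forEach(tsWithKey ->
                        System.out.println("read TrafficStream for key " + tsWithKey.getKey()));
            } catch (CompletionException e) {
                if (e.getCause() instanceof EOFException) {
                    break; // input exhausted
                }
                throw e;
            }
        }
    }
}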
@@ -40,7 +69,11 @@ public CompletableFuture> readNextTrafficStreamChunk var ts = builder.build(); trafficStreamsRead.incrementAndGet(); log.trace("Parsed traffic stream #{}: {}", trafficStreamsRead.get(), ts); - return List.of(new TrafficStreamWithEmbeddedKey(ts)); + return List.of(new PojoTrafficStreamAndKey(ts, + PojoTrafficStreamKeyAndContext.build(ts, tsk-> { + var channelCtx = channelContextManager.retainOrCreateContext(tsk); + return new IOSTrafficStreamContext(channelCtx, tsk); + }))); }).exceptionally(e->{ var ecf = new CompletableFuture>(); ecf.completeExceptionally(e.getCause()); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/TrafficStreamWithEmbeddedKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/TrafficStreamWithEmbeddedKey.java deleted file mode 100644 index 90cdb95cc..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/TrafficStreamWithEmbeddedKey.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.opensearch.migrations.replay.traffic.source; - -import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; -import org.opensearch.migrations.trafficcapture.protos.TrafficStream; - -public class TrafficStreamWithEmbeddedKey implements ITrafficStreamWithKey { - public final TrafficStream stream; - - public TrafficStreamWithEmbeddedKey(TrafficStream stream) { - this.stream = stream; - } - - @Override - public ITrafficStreamKey getKey() { - return new PojoTrafficStreamKey(stream); - } - - @Override - public TrafficStream getStream() { - return stream; - } -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/TrafficChannelKeyFormatter.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/TrafficChannelKeyFormatter.java new file mode 100644 index 000000000..b0cb18148 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/TrafficChannelKeyFormatter.java @@ -0,0 +1,24 @@ +package org.opensearch.migrations.replay.util; + +import java.util.StringJoiner; + +public class TrafficChannelKeyFormatter { + private TrafficChannelKeyFormatter() {} + + public static String format(String nodeId, String connectionId) { + return new StringJoiner(".") + .add(nodeId) + .add(connectionId) + .toString(); + } + + public static String format(String nodeId, String connectionId, int trafficStreamIndex) { + return new StringJoiner(".") + .add(nodeId) + .add(connectionId) + .add("" + trafficStreamIndex) + .toString(); + } +} + + diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 3856676e5..60eb2c163 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -5,10 +5,11 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import 
org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; @@ -85,18 +86,19 @@ public CompletableFuture> readNextTrafficStreamChunk var t = sourceStartTime.plus(Duration.ofMillis(i)); log.debug("Built timestamp for " + i); - return CompletableFuture.completedFuture(List.of(new TrafficStreamWithEmbeddedKey( - TrafficStream.newBuilder() - .setNumberOfThisLastChunk(0) - .setConnectionId("conn_" + i) - .addSubStream(TrafficObservation.newBuilder() - .setTs(Timestamp.newBuilder() - .setSeconds(t.getEpochSecond()) - .setNanos(t.getNano()) - .build()) - .setClose(CloseObservation.getDefaultInstance()) + var ts = TrafficStream.newBuilder() + .setNumberOfThisLastChunk(0) + .setConnectionId("conn_" + i) + .addSubStream(TrafficObservation.newBuilder() + .setTs(Timestamp.newBuilder() + .setSeconds(t.getEpochSecond()) + .setNanos(t.getNano()) .build()) - .build()))); + .setClose(CloseObservation.getDefaultInstance()) + .build()) + .build(); + var key = PojoTrafficStreamKeyAndContext.build(ts, TestTrafficStreamsLifecycleContext::new); + return CompletableFuture.completedFuture(List.of(new PojoTrafficStreamAndKey(ts, key))); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java index 4ffa9aa15..25cb216ce 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java @@ -44,12 +44,12 @@ public CompletableFuture> readNextTrafficStreamChunk } return trafficSource.readNextTrafficStreamChunk() .thenApply(ltswk -> { - var transformed = ltswk.stream().map(this::modifyTrafficStream).collect(Collectors.toList()); + var transformedTrafficStream = ltswk.stream().map(this::modifyTrafficStream).collect(Collectors.toList()); var oldValue = numberOfTrafficStreamsToRead.get(); - var newValue = oldValue - transformed.size(); + var newValue = oldValue - transformedTrafficStream.size(); var exchangeResult = numberOfTrafficStreamsToRead.compareAndExchange(oldValue, newValue); assert exchangeResult == oldValue : "didn't expect to be running with a race condition here"; - return transformed; + return transformedTrafficStream; }); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java index b74e23606..4d1f7c1b4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java @@ -2,7 +2,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; 
-import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; @@ -33,12 +33,15 @@ public void onExpireAccumulation(String partitionId, var expiredCountsPerLoop = new ArrayList(); for (int i=0; inew Accumulation(tsk, 0)); createdAccumulations.add(accumulation); - expiringMap.expireOldEntries(new PojoTrafficStreamKey(TEST_NODE_ID_STRING, connectionGenerator.apply(i), 0), + expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, + connectionGenerator.apply(i), 0, TestTrafficStreamsLifecycleContext::new), accumulation, ts); - var rrPair = createdAccumulations.get(i).getOrCreateTransactionPair(new PojoTrafficStreamKey("n","c",1)); + var rrPair = createdAccumulations.get(i).getOrCreateTransactionPair( + PojoTrafficStreamKeyAndContext.build("n","c",1, TestTrafficStreamsLifecycleContext::new)); rrPair.addResponseData(ts, ("Add"+i).getBytes(StandardCharsets.UTF_8)); expiredCountsPerLoop.add(expiredAccumulations.size()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java index b214cdc5e..bed1fd9fd 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java @@ -2,7 +2,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; @@ -36,12 +36,16 @@ public void onExpireAccumulation(String partitionId, var expiredCountsPerLoop = new ArrayList(); for (int i=0; inew Accumulation(tsk, 0)); - expiringMap.expireOldEntries(new PojoTrafficStreamKey(TEST_NODE_ID_STRING, connectionGenerator.apply(i), 0), accumulation, ts); + expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, + connectionGenerator.apply(i), 0, TestTrafficStreamsLifecycleContext::new), + accumulation, ts); createdAccumulations.add(accumulation); if (accumulation != null) { - var rrPair = accumulation.getOrCreateTransactionPair(new PojoTrafficStreamKey("n","c",1)); + var rrPair = accumulation.getOrCreateTransactionPair(PojoTrafficStreamKeyAndContext.build("n","c",1, + TestTrafficStreamsLifecycleContext::new)); rrPair.addResponseData(ts, ("Add" + i).getBytes(StandardCharsets.UTF_8)); } expiredCountsPerLoop.add(expiredAccumulations.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index a7ac23fc8..a91e78172 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -12,8 +12,8 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamWithKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; @@ -188,9 +188,10 @@ public TrafficStreamCursorKey(TrafficStream stream, int arrayIndex) { nodeId = stream.getNodeId(); trafficStreamIndex = TrafficStreamUtils.getTrafficStreamIndex(stream); this.arrayIndex = arrayIndex; - var key = new PojoTrafficStreamKey(connectionId, nodeId, trafficStreamIndex); - this.trafficStreamsContext = new TestTrafficStreamsLifecycleContext(key); - key.setTrafficStreamsContext(this.trafficStreamsContext); + var key = PojoTrafficStreamKeyAndContext.build(connectionId, nodeId, trafficStreamIndex, tsk-> + new TestTrafficStreamsLifecycleContext(tsk)); + trafficStreamsContext = key.getTrafficStreamsContext(); + key.setTrafficStreamsContext(trafficStreamsContext); } @Override @@ -241,7 +242,7 @@ public CompletableFuture> readNextTrafficStreamChunk pQueue.add(key); cursorHighWatermark = idx; } - return CompletableFuture.supplyAsync(()->List.of(new PojoTrafficStreamWithKey(stream, key))); + return CompletableFuture.supplyAsync(()->List.of(new PojoTrafficStreamAndKey(stream, key))); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index ab2c5caad..3859915c3 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -8,16 +8,13 @@ import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; -import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.opensearch.migrations.replay.kafka.KafkaTestUtils; import org.opensearch.migrations.replay.kafka.KafkaTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -119,8 +116,8 @@ private void loadStreamsToKafka(KafkaConsumer kafkaConsumer, var kafkaProducer = buildKafkaProducer(); var counter = new 
AtomicInteger(); loadStreamsAsynchronouslyWithCloseableResource(kafkaConsumer, streams, s -> s.forEach(trafficStream -> - KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, new TrafficStreamWithEmbeddedKey(trafficStream), - TEST_TOPIC_NAME, "KEY_" + counter.incrementAndGet()))); + KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, + trafficStream, TEST_TOPIC_NAME, "KEY_" + counter.incrementAndGet()))); Thread.sleep(PRODUCER_SLEEP_INTERVAL_MS); } @@ -170,8 +167,8 @@ Producer buildKafkaProducer() { List chunks = null; chunks = originalTrafficSource.readNextTrafficStreamChunk().get(); for (int j = 0; j < chunks.size(); ++j) { - KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, chunks.get(j), TEST_TOPIC_NAME, - "KEY_" + i + "_" + j); + KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, chunks.get(j).getStream(), + TEST_TOPIC_NAME, "KEY_" + i + "_" + j); Thread.sleep(PRODUCER_SLEEP_INTERVAL_MS); } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index 8025f52e0..a048f571d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -2,20 +2,17 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.MockMetricsBuilder; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import java.util.Map; import java.util.Optional; -import static org.junit.jupiter.api.Assertions.*; - class ParsedHttpMessagesAsDictsTest { - private static final PojoTrafficStreamKey TEST_TRAFFIC_STREAM_KEY = - new PojoTrafficStreamKey("N","C",1); + private static final PojoTrafficStreamKeyAndContext TEST_TRAFFIC_STREAM_KEY = + PojoTrafficStreamKeyAndContext.build("N","C",1, TestTrafficStreamsLifecycleContext::new); ParsedHttpMessagesAsDicts makeTestData() { return makeTestData(null, null); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index fa52d308e..0885c3d15 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -15,7 +15,7 @@ import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumerTest; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.MockMetricsBuilder; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; @@ -77,7 +77,8 @@ 
public void testTupleNewWithNullKeyThrows() { @Test public void testOutputterWithNulls() throws IOException { var emptyTuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(new PojoTrafficStreamKey(NODE_ID,"c",0), 0, 0), + new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, + TestTrafficStreamsLifecycleContext::new), 0, 0), null, null, null, null, null, null); try (var closeableLogSetup = new CloseableLogSetup()) { var consumer = new TupleParserChainConsumer(null, new ResultsToLogsConsumer()); @@ -93,7 +94,8 @@ public void testOutputterWithNulls() throws IOException { public void testOutputterWithException() throws IOException { var exception = new Exception(TEST_EXCEPTION_MESSAGE); var emptyTuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(new PojoTrafficStreamKey(NODE_ID,"c",0), 0, 0), + new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, + TestTrafficStreamsLifecycleContext::new), 0, 0), null, null, null, null, exception, null); try (var closeableLogSetup = new CloseableLogSetup()) { @@ -228,7 +230,8 @@ public void testOutputterForPost() throws IOException { @Test private void testOutputterForRequest(String requestResourceName, String expected) throws IOException { - var trafficStreamKey = new PojoTrafficStreamKey(NODE_ID,"c",0); + var trafficStreamKey = PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, + TestTrafficStreamsLifecycleContext::new); var requestCtx = TestRequestKey.getTestConnectionRequestContext(0); trafficStreamKey.setTrafficStreamsContext(requestCtx.getImmediateEnclosingScope()); var sourcePair = new RequestResponsePacketPair(trafficStreamKey, 0, 0); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 1e05fdb7e..2b8cfdee8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -12,12 +12,12 @@ import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; -import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; @@ -264,7 +264,10 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel }); var tsList = trafficStreams.collect(Collectors.toList()); trafficStreams = tsList.stream(); - 
trafficStreams.forEach(ts->trafficAccumulator.accept(new TrafficStreamWithEmbeddedKey(ts))); + ; + trafficStreams.forEach(ts->trafficAccumulator.accept( + new PojoTrafficStreamAndKey(ts, PojoTrafficStreamKeyAndContext.build(ts, TestTrafficStreamsLifecycleContext::new) + ))); trafficAccumulator.close(); return tsIndicesReceived; } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java index e350bc0a0..ffbf685db 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java @@ -1,15 +1,12 @@ package org.opensearch.migrations.replay; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; -import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; -import java.io.FileInputStream; import java.io.IOException; import java.util.HashMap; import java.util.Optional; -import java.util.zip.GZIPInputStream; public class V0_1TrafficCaptureSource extends CompressedFileTrafficCaptureSource { @@ -32,9 +29,10 @@ protected ITrafficStreamWithKey modifyTrafficStream(ITrafficStreamWithKey stream outgoingBuilder.setNumberOfThisLastChunk(incoming.getNumberOfThisLastChunk()); } var progress = connectionProgressMap.get(incoming.getConnectionId()); + var key = streamWithKey.getKey(); if (progress == null) { progress = new Progress(); - progress.lastWasRead = streamWithKey.getKey().getTrafficStreamIndex() != 0; + progress.lastWasRead = key.getTrafficStreamIndex() != 0; connectionProgressMap.put(incoming.getConnectionId(), progress); } outgoingBuilder.setLastObservationWasUnterminatedRead(progress.lastWasRead); @@ -44,7 +42,7 @@ protected ITrafficStreamWithKey modifyTrafficStream(ITrafficStreamWithKey stream if (incoming.hasNumberOfThisLastChunk()) { connectionProgressMap.remove(incoming.getConnectionId()); } - return new TrafficStreamWithEmbeddedKey(outgoingBuilder.build()); + return new PojoTrafficStreamAndKey(outgoingBuilder.build(), key); } private static class Progress { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index cbc0bed4d..deead7181 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -10,8 +10,6 @@ import org.junit.jupiter.api.Test; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.replay.traffic.source.TrafficStreamWithEmbeddedKey; -import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import 
org.testcontainers.junit.jupiter.Testcontainers; @@ -20,9 +18,7 @@ import java.time.Duration; import java.time.Instant; import java.util.List; -import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.locks.Condition; @Slf4j @Testcontainers(disabledWithoutDocker = true) @@ -96,7 +92,6 @@ static Instant getTimeAtPoint(int i) { private void sendNextMessage(Producer kafkaProducer, int i) { var ts = KafkaTestUtils.makeTestTrafficStreamWithFixedTime(getTimeAtPoint(i), i); - KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, new TrafficStreamWithEmbeddedKey(ts), - TEST_TOPIC_NAME, ""+i); + KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, ts, TEST_TOPIC_NAME, ""+i); } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTestUtils.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTestUtils.java index a5eed831a..e7e2136f2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTestUtils.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTestUtils.java @@ -10,7 +10,9 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.Tag; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.replay.util.TrafficChannelKeyFormatter; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -78,16 +80,18 @@ static TrafficStream makeTestTrafficStreamWithFixedTime(Instant t, int i) { @SneakyThrows public static void writeTrafficStreamRecord(Producer kafkaProducer, - ITrafficStreamWithKey trafficStreamAndKey, + TrafficStream trafficStream, String TEST_TOPIC_NAME, String recordId) { while (true) { try { - var record = new ProducerRecord(TEST_TOPIC_NAME, recordId, trafficStreamAndKey.getStream().toByteArray()); - log.info("sending record with trafficStream=" + trafficStreamAndKey.getKey()); + var record = new ProducerRecord(TEST_TOPIC_NAME, recordId, trafficStream.toByteArray()); + var tsKeyStr = TrafficChannelKeyFormatter.format(trafficStream.getNodeId(), + trafficStream.getConnectionId()); + log.info("sending record with trafficStream=" + tsKeyStr); var sendFuture = kafkaProducer.send(record, (metadata, exception) -> { log.atInfo().setCause(exception).setMessage(() -> "completed send of TrafficStream with key=" + - trafficStreamAndKey.getKey() + " metadata=" + metadata).log(); + tsKeyStr + " metadata=" + metadata).log(); }); var recordMetadata = sendFuture.get(); log.info("finished publishing record... 
metadata=" + recordMetadata); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 0f8505ff0..4c0df1bed 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -2,7 +2,7 @@ import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; public class TestRequestKey { @@ -20,9 +20,10 @@ public static final Contexts.HttpTransactionContext getTestConnectionRequestCont public static Contexts.HttpTransactionContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { var rk = new UniqueReplayerRequestKey( - new PojoTrafficStreamKey(TEST_NODE_ID, connectionId, 0), + PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, + tsk -> new TestTrafficStreamsLifecycleContext(tsk)), 0, replayerIdx); - var tsCtx = new TestTrafficStreamsLifecycleContext(rk.trafficStreamKey); - return new Contexts.HttpTransactionContext(tsCtx, rk, METERING_CLOSURE.makeSpanContinuation("test2")); + return new Contexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), + rk, METERING_CLOSURE.makeSpanContinuation("test2")); } } From ad6bd13a9dc2c9ced1a7cdd1027fa783448fcf5e Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 15 Dec 2023 14:00:38 -0500 Subject: [PATCH 23/94] Test fixes Signed-off-by: Greg Schohn --- .../jsonJoltMessageTransformerProvider/build.gradle | 1 + .../replay/CapturedTrafficToHttpTransactionAccumulator.java | 2 +- .../migrations/replay/datatypes/PojoTrafficStreamKey.java | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle index a974d3aea..c1b9fbbda 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle @@ -13,6 +13,7 @@ dependencies { implementation project(':replayerPlugins:jsonMessageTransformers:jsonJoltMessageTransformer') testImplementation project(':replayerPlugins:jsonMessageTransformers:jsonJoltMessageTransformer') + testImplementation project(':coreUtilities') testImplementation project(':trafficReplayer') testImplementation testFixtures(project(path: ':testUtilities')) testImplementation testFixtures(project(path: ':trafficReplayer')) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 04eb1a7a8..a68072966 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -421,7 +421,7 @@ private boolean handleEndOfRequest(Accumulation accumulation) { private void handleEndOfResponse(Accumulation accumulation, RequestResponsePacketPair.ReconstructionStatus status) { assert accumulation.state == Accumulation.State.ACCUMULATING_WRITES; var rrPair = accumulation.getRrPair(); - var requestKey = rrPair.getRequestContext().getLogicalEnclosingScope().getReplayerRequestKey(); + var requestKey = rrPair.getResponseContext().getLogicalEnclosingScope().getReplayerRequestKey(); metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) .setAttribute(MetricsAttributeKey.REQUEST_ID, requestKey.toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java index efbe294a5..09994d650 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java @@ -32,6 +32,6 @@ public int getTrafficStreamIndex() { @Override public String toString() { - return TrafficChannelKeyFormatter.format(nodeId, connectionId); + return TrafficChannelKeyFormatter.format(nodeId, connectionId, trafficStreamIndex); } } From 07ae016d0316e823d3905158300d273129c796db Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 15 Dec 2023 23:33:23 -0500 Subject: [PATCH 24/94] Begin to cleanup endspans for some of the contexts. Lots of bugs remain, but the replayer isn't crashing. 
Signed-off-by: Greg Schohn --- .../IScopedInstrumentationAttributes.java | 4 ++ ...nditionallyReliableLoggingHttpHandler.java | 2 +- .../netty/LoggingHttpHandler.java | 8 +-- .../netty/ProxyChannelInitializer.java | 2 +- ...edTrafficToHttpTransactionAccumulator.java | 7 +- .../replay/RequestResponsePacketPair.java | 6 +- .../migrations/replay/TrafficReplayer.java | 1 + .../http/HttpJsonTransformingConsumer.java | 2 +- .../PojoTrafficStreamKeyAndContext.java | 5 +- .../kafka/KafkaTrafficCaptureSource.java | 17 ++++- .../replay/kafka/TrackingKafkaConsumer.java | 68 +++++++++++++++---- .../replay/tracing/ChannelContextManager.java | 43 +++++++++--- .../replay/tracing/ChannelKeyContext.java | 17 ----- .../migrations/replay/tracing/Contexts.java | 6 ++ .../migrations/replay/tracing/IContexts.java | 20 +++--- 15 files changed, 140 insertions(+), 68 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 5ef7b129a..511cc710a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -11,6 +11,10 @@ public interface IScopedInstrumentationAttributes { Span getCurrentSpan(); + default void endSpan() { + getCurrentSpan().end(); + } + default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index 05a31e8cd..518f368f8 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -38,7 +38,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); METERING_CLOSURE.meterHistogramMicros(messageContext, t==null ? 
"blockedFlushFailure_micro" : "stream_flush_failure_micro"); - messageContext.getCurrentSpan().end(); + messageContext.endSpan(); if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 05061cdcc..c8dd2d888 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -210,7 +210,7 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { METERING_CLOSURE.meterIncrementEvent(messageContext, "handlerRemoved"); - messageContext.getCurrentSpan().end(); + messageContext.endSpan(); messageContext.getEnclosingScope().currentSpan.end(); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { @@ -241,7 +241,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (messageContext.getState() == HttpMessageContext.HttpTransactionState.RESPONSE) { - messageContext.getCurrentSpan().end(); + messageContext.endSpan(); rotateNextMessageContext(HttpMessageContext.HttpTransactionState.REQUEST); } var timestamp = Instant.now(); @@ -269,7 +269,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (requestParsingHandler.haveParsedFullRequest) { - messageContext.getCurrentSpan().end(); + messageContext.endSpan(); var httpRequest = requestParsingHandler.resetCurrentRequest(); captureState.liveReadObservationsInOffloader = false; captureState.advanceStateModelIntoResponseGather(); @@ -292,7 +292,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { if (messageContext.getState() != HttpMessageContext.HttpTransactionState.RESPONSE) { - messageContext.getCurrentSpan().end(); + messageContext.endSpan(); rotateNextMessageContext(HttpMessageContext.HttpTransactionState.RESPONSE); } var bb = (ByteBuf) msg; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 6ff60ee50..919b5b0c2 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -48,7 +48,7 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler("n", "c", + ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler("", connectionId, connectionCaptureFactory, requestCapturePredicate, this::shouldGuaranteeMessageOffloading)); 
ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index a68072966..bd68ac710 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -105,7 +105,7 @@ private static class SpanWrappingAccumulationCallbacks { public void onRequestReceived(@NonNull Accumulation accum, IContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { - requestCtx.getCurrentSpan().end(); + requestCtx.endSpan(); underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), requestCtx.getLogicalEnclosingScope(), request); } @@ -113,7 +113,7 @@ public void onRequestReceived(@NonNull Accumulation accum, public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @NonNull RequestResponsePacketPair rrpp) { var responseCtx = rrpp.getResponseContext(); - responseCtx.getCurrentSpan().end(); + responseCtx.endSpan(); underlying.onFullDataReceived(key, responseCtx.getLogicalEnclosingScope(), rrpp); } @@ -122,7 +122,6 @@ public void onConnectionClose(@NonNull Accumulation accum, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { var tsCtx = accum.trafficChannelKey.getTrafficStreamsContext(); - tsCtx.getCurrentSpan().end(); underlying.onConnectionClose(accum.trafficChannelKey, accum.startingSourceRequestIndex + accum.startingSourceRequestIndex, tsCtx, status, when, trafficStreamKeysBeingHeld); @@ -131,13 +130,11 @@ public void onConnectionClose(@NonNull Accumulation accum, public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, IContexts.ITrafficStreamsLifecycleContext tsCtx, @NonNull List trafficStreamKeysBeingHeld) { - tsCtx.getCurrentSpan().end(); underlying.onTrafficStreamsExpired(status, tsCtx.getLogicalEnclosingScope(), trafficStreamKeysBeingHeld); } public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) { var tsCtx = tsk.getTrafficStreamsContext(); - tsCtx.getCurrentSpan().end(); underlying.onTrafficStreamIgnored(tsk, tsk.getTrafficStreamsContext()); } }; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 389636af6..231bf89ff 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -76,10 +76,8 @@ public IContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { public void rotateRequestGatheringToResponse() { var looseCtx = requestOrResponseAccumulationContext; assert looseCtx instanceof IContexts.IRequestAccumulationContext; - var ctx = (IContexts.IRequestAccumulationContext) looseCtx; - var parentCtx = ctx.getLogicalEnclosingScope(); - ctx.getCurrentSpan().end(); - requestOrResponseAccumulationContext = new Contexts.ResponseAccumulationContext(parentCtx, + requestOrResponseAccumulationContext = new 
Contexts.ResponseAccumulationContext( + getRequestContext().getLogicalEnclosingScope(), Accumulation.METERING_CLOSURE.makeSpanContinuation("accumulatingResponse")); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index b0ee9707e..253ecd8df 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -696,6 +696,7 @@ private void commitTrafficStreams(List trafficStreamKeysBeing private void commitTrafficStreams(List trafficStreamKeysBeingHeld, boolean shouldCommit) { if (shouldCommit && trafficStreamKeysBeingHeld != null) { for (var tsk : trafficStreamKeysBeingHeld) { + tsk.getTrafficStreamsContext().endSpan(); trafficCaptureSource.commitTrafficStream(tsk); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 89bb5c6e2..c27fb9552 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -141,7 +141,7 @@ public DiagnosticTrackableCompletableFuture { - transformationContext.getCurrentSpan().end(); + transformationContext.endSpan(); METERING_CLOSURE.meterIncrementEvent(transformationContext, t != null ? 
"transformRequestFailed" : "transformRequestSuccess"); METERING_CLOSURE.meterHistogramMicros(transformationContext, "transformationDuration"); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java index fbf837e4c..e945c9f40 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java @@ -18,8 +18,9 @@ public class PojoTrafficStreamKeyAndContext extends PojoTrafficStreamKey { @NonNull IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; - public static PojoTrafficStreamKeyAndContext build(TrafficStream stream, Function contextSupplier) { + public static PojoTrafficStreamKeyAndContext + build(TrafficStream stream, + Function contextSupplier) { var rval = new PojoTrafficStreamKeyAndContext(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); rval.setTrafficStreamsContext(contextSupplier.apply(rval)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 57b001591..0eeb13a6c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -13,6 +13,8 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.ChannelKeyContext; +import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.SimpleMeteringClosure; @@ -94,13 +96,24 @@ public KafkaTrafficCaptureSource(Consumer kafkaConsumer, { this.channelContextManager = new ChannelContextManager(); trackingKafkaConsumer = new TrackingKafkaConsumer(kafkaConsumer, topic, keepAliveInterval, clock, - tskList->tskList.forEach(channelContextManager::releaseContextFor)); + this::onKeyFinishedCommitting); trafficStreamsRead = new AtomicLong(); this.behavioralPolicy = behavioralPolicy; kafkaConsumer.subscribe(Collections.singleton(topic), trackingKafkaConsumer); kafkaExecutor = Executors.newSingleThreadExecutor(); } + private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { + var looseParentScope = trafficStreamKey.getTrafficStreamsContext().getEnclosingScope(); + if (!(looseParentScope instanceof Contexts.KafkaRecordContext)) { + throw new IllegalArgumentException("Expected parent context of type " + Contexts.KafkaRecordContext.class + + " instead of " + looseParentScope + " (of type=" + looseParentScope.getClass() + ")"); + } + var kafkaCtx = (Contexts.KafkaRecordContext) looseParentScope; + kafkaCtx.endSpan(); + channelContextManager.releaseContextFor((ChannelKeyContext) 
kafkaCtx.getImmediateEnclosingScope()); + } + public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull String brokers, @NonNull String topic, @NonNull String groupId, @@ -224,7 +237,7 @@ public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { throw new IllegalArgumentException("Expected key of type "+TrafficStreamKeyWithKafkaRecordId.class+ " but received "+trafficStreamKey+" (of type="+trafficStreamKey.getClass()+")"); } - trackingKafkaConsumer.commitKafkaKey((TrafficStreamKeyWithKafkaRecordId) trafficStreamKey); + trackingKafkaConsumer.commitKafkaKey(trafficStreamKey, (TrafficStreamKeyWithKafkaRecordId) trafficStreamKey); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 6d4cdffa1..1ad4448c3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -1,5 +1,8 @@ package org.opensearch.migrations.replay.kafka; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; @@ -13,17 +16,15 @@ import java.time.Clock; import java.time.Duration; import java.time.Instant; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.PriorityQueue; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -37,6 +38,33 @@ @Slf4j public class TrackingKafkaConsumer implements ConsumerRebalanceListener { + @AllArgsConstructor + private static class OrderedKeyHolder implements Comparable { + @Getter final long offset; + @Getter @NonNull final ITrafficStreamKey tsk; + + @Override + public int compareTo(OrderedKeyHolder o) { + return Long.compare(offset, o.offset); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + OrderedKeyHolder that = (OrderedKeyHolder) o; + + if (offset != that.offset) return false; + return tsk.equals(that.tsk); + } + + @Override + public int hashCode() { + return Long.valueOf(offset).hashCode(); + } + } + /** * The keep-alive should already be set to a fraction of the max poll timeout for * the consumer (done outside of this class). 
The keep-alive tells this class how @@ -60,8 +88,8 @@ public class TrackingKafkaConsumer implements ConsumerRebalanceListener { final Map partitionToOffsetLifecycleTrackerMap; // loosening visibility so that a unit test can read this final Map nextSetOfCommitsMap; - final Map> nextSetOfKeysBeingCommitted; - final java.util.function.Consumer> onCommitKeysCallback; + final Map> nextSetOfKeysContextsBeingCommitted; + final java.util.function.Consumer onCommitKeyCallback; private final Duration keepAliveInterval; private final AtomicReference lastTouchTimeRef; private final AtomicInteger consumerConnectionGeneration; @@ -69,7 +97,7 @@ public class TrackingKafkaConsumer implements ConsumerRebalanceListener { public TrackingKafkaConsumer(Consumer kafkaConsumer, String topic, Duration keepAliveInterval, Clock c, - java.util.function.Consumer> onCommitKeysCallback) { + java.util.function.Consumer onCommitKeyCallback) { this.kafkaConsumer = kafkaConsumer; this.topic = topic; this.clock = c; @@ -79,8 +107,8 @@ public TrackingKafkaConsumer(Consumer kafkaConsumer, String topi consumerConnectionGeneration = new AtomicInteger(); kafkaRecordsLeftToCommit = new AtomicInteger(); this.keepAliveInterval = keepAliveInterval; - this.nextSetOfKeysBeingCommitted = new HashMap<>(); - this.onCommitKeysCallback = onCommitKeysCallback; + this.nextSetOfKeysContextsBeingCommitted = new HashMap<>(); + this.onCommitKeyCallback = onCommitKeyCallback; } @Override @@ -89,7 +117,7 @@ public void onPartitionsRevoked(Collection partitions) { partitions.forEach(p->{ var tp = new TopicPartition(topic, p.partition()); nextSetOfCommitsMap.remove(tp); - nextSetOfKeysBeingCommitted.remove(tp); + nextSetOfKeysContextsBeingCommitted.remove(tp); partitionToOffsetLifecycleTrackerMap.remove(p.partition()); }); kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() @@ -221,7 +249,7 @@ private ConsumerRecords safePollWithSwallowedRuntimeExceptions() } } - void commitKafkaKey(KafkaCommitOffsetData kafkaTsk) { + void commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) { var tracker = partitionToOffsetLifecycleTrackerMap.get(kafkaTsk.getPartition()); if (tracker == null || tracker.consumerConnectionGeneration != kafkaTsk.getGeneration()) { log.atWarn().setMessage(()->"trafficKey's generation (" + kafkaTsk.getGeneration() + ") is not current (" + @@ -232,12 +260,16 @@ void commitKafkaKey(KafkaCommitOffsetData kafkaTsk) { kafkaTsk).log(); return; } + var p = kafkaTsk.getPartition(); Optional newHeadValue; + var k = new TopicPartition(topic, p); + nextSetOfKeysContextsBeingCommitted.computeIfAbsent(k, k2 -> new PriorityQueue<>()) + .add(new OrderedKeyHolder(kafkaTsk.getOffset(), streamKey)); + newHeadValue = tracker.removeAndReturnNewHead(kafkaTsk.getOffset()); newHeadValue.ifPresent(o -> { - var k = new TopicPartition(topic, p); var v = new OffsetAndMetadata(o); log.atDebug().setMessage(()->"Adding new commit " + k + "->" + v + " to map").log(); nextSetOfCommitsMap.put(k, v); @@ -249,8 +281,9 @@ private void safeCommit() { if (!nextSetOfCommitsMap.isEmpty()) { log.atDebug().setMessage(() -> "Committing " + nextSetOfCommitsMap).log(); kafkaConsumer.commitSync(nextSetOfCommitsMap); - onCommitKeysCallback.accept(nextSetOfKeysBeingCommitted.values().stream().flatMap(Collection::stream)); - nextSetOfKeysBeingCommitted.clear(); + nextSetOfCommitsMap.entrySet().stream() + .forEach(kvp->callbackUpTo(nextSetOfKeysContextsBeingCommitted.get(kvp.getKey()), + kvp.getValue().offset())); 
nextSetOfCommitsMap.clear(); log.trace("partitionToOffsetLifecycleTrackerMap="+partitionToOffsetLifecycleTrackerMap); kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() @@ -273,6 +306,15 @@ private void safeCommit() { } } + private void callbackUpTo(PriorityQueue orderedKeyHolders, long upToOffset) { + for (var nextKeyHolder = orderedKeyHolders.peek(); + nextKeyHolder != null && nextKeyHolder.offset<=upToOffset; + nextKeyHolder = orderedKeyHolders.peek()) { + onCommitKeyCallback.accept(nextKeyHolder.tsk); + orderedKeyHolders.poll(); + } + } + String nextCommitsToString() { return "nextCommits="+nextSetOfCommitsMap.entrySet().stream() .map(kvp->kvp.getKey()+"->"+kvp.getValue()).collect(Collectors.joining(",")); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index eb0b21cbc..897263e39 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -1,9 +1,9 @@ package org.opensearch.migrations.replay.tracing; +import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.tracing.SimpleMeteringClosure; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import java.util.HashMap; import java.util.function.Function; @@ -11,7 +11,33 @@ public class ChannelContextManager implements Function { public static final String TELEMETRY_SCOPE_NAME = "Channel"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - HashMap connectionToChannelContextMap = new HashMap<>(); + + private static class RefCountedContext { + @Getter final ChannelKeyContext context; + private int refCount; + + private RefCountedContext(ChannelKeyContext context) { + this.context = context; + } + + ChannelKeyContext retain() { + refCount++; + return context; + } + + /** + * Returns true if this was the final release + * + * @return + */ + boolean release() { + refCount--; + assert refCount >= 0; + return refCount == 0; + } + } + + HashMap connectionToChannelContextMap = new HashMap<>(); public ChannelKeyContext apply(ITrafficStreamKey tsk) { return retainOrCreateContext(tsk); @@ -23,16 +49,17 @@ public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk, ISpanGenerator spanGenerator) { return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), - k-> new ChannelKeyContext(tsk, spanGenerator).retain()); + k-> new RefCountedContext(new ChannelKeyContext(tsk, spanGenerator))).retain(); } - public ChannelKeyContext releaseContextFor(ITrafficStreamKey tsk) { - var connectionId = tsk.getConnectionId(); - var ctx = connectionToChannelContextMap.get(connectionId); - var finalRelease = ctx.release(); + public ChannelKeyContext releaseContextFor(ChannelKeyContext ctx) { + var connId = ctx.getConnectionId(); + var refCountedCtx = connectionToChannelContextMap.get(connId); + assert ctx == refCountedCtx.context; + var finalRelease = refCountedCtx.release(); if (finalRelease) { ctx.currentSpan.end(); - 
connectionToChannelContextMap.remove(connectionId); + connectionToChannelContextMap.remove(connId); } return ctx; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java index e58303ae9..20a251fec 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -15,8 +15,6 @@ public class ChannelKeyContext implements IChannelKeyContext, IWithStartTime { final Span currentSpan; @Getter final Instant startTime; - @Getter - int refCount; public ChannelKeyContext(ISourceTrafficChannelKey channelKey, ISpanGenerator spanGenerator) { this.channelKey = channelKey; @@ -29,19 +27,4 @@ public String toString() { return channelKey.toString(); } - public ChannelKeyContext retain() { - refCount++; - return this; - } - - /** - * Returns true if this was the final release - * - * @return - */ - public boolean release() { - refCount--; - assert refCount >= 0; - return refCount == 0; - } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java index 0849576ea..539b336ef 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java @@ -2,6 +2,7 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; public class Contexts { @@ -105,6 +106,11 @@ public ResponseAccumulationContext(IContexts.IReplayerHttpTransactionContext enc super(enclosingScope); setCurrentSpan(spanGenerator); } + + @Override + public void endSpan() { + super.endSpan(); + } } public static class RequestTransformationContext diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java index 47b4ad8bc..70d297f85 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java @@ -10,7 +10,7 @@ import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; public class IContexts { - public static interface IKafkaRecordContext + public interface IKafkaRecordContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); @@ -21,7 +21,7 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } } - public static interface ITrafficStreamsLifecycleContext + public interface ITrafficStreamsLifecycleContext extends IChannelKeyContext, IWithTypedEnclosingScope { ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); @@ -30,7 +30,7 @@ default ISourceTrafficChannelKey getChannelKey() { } } - public static 
interface IReplayerHttpTransactionContext + public interface IReplayerHttpTransactionContext extends IHttpTransactionContext, IChannelKeyContext, IWithTypedEnclosingScope { static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); @@ -56,24 +56,24 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } } - public static interface IRequestAccumulationContext + public interface IRequestAccumulationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public static interface IResponseAccumulationContext + public interface IResponseAccumulationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public static interface IRequestTransformationContext + public interface IRequestTransformationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public static interface IWaitingForHttpResponseContext + public interface IWaitingForHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public static interface IReceivingHttpResponseContext + public interface IReceivingHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public static interface IRequestSendingContext + public interface IRequestSendingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public static interface ITupleHandlingContext + public interface ITupleHandlingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } } From ccb517cb83d44a6ce33e7bfe8e2c865e049d1b41 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sun, 17 Dec 2023 14:55:09 -0500 Subject: [PATCH 25/94] More work to get context chains to work better together. Signed-off-by: Greg Schohn --- ...edTrafficToHttpTransactionAccumulator.java | 11 +++-- .../migrations/replay/ReplayEngine.java | 7 +-- .../replay/RequestResponsePacketPair.java | 4 +- .../migrations/replay/TrafficReplayer.java | 15 ++++--- .../datatypes/UniqueReplayerRequestKey.java | 6 +-- .../migrations/replay/tracing/Contexts.java | 5 --- .../KafkaCommitsWorkBetweenLongPolls.java | 44 +++++++++++++++++++ .../replay/kafka/KafkaKeepAliveTests.java | 29 ------------ .../replay/kafka/KafkaTracingTest.java | 10 +++++ 9 files changed, 77 insertions(+), 54 deletions(-) create mode 100644 TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index bd68ac710..54a186953 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -102,8 +102,7 @@ public void onExpireAccumulation(String partitionId, Accumulation accumulation) @AllArgsConstructor private static class SpanWrappingAccumulationCallbacks { private final AccumulationCallbacks underlying; - public void onRequestReceived(@NonNull Accumulation accum, - IContexts.IRequestAccumulationContext requestCtx, + public void onRequestReceived(IContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { requestCtx.endSpan(); 
underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), @@ -400,17 +399,17 @@ private boolean rotateAccumulationOnReadIfNecessary(String connectionId, Accumul private boolean handleEndOfRequest(Accumulation accumulation) { assert accumulation.state == Accumulation.State.ACCUMULATING_READS : "state == " + accumulation.state; var rrPair = accumulation.getRrPair(); - var requestPacketBytes = rrPair.requestData; + var httpMessage = rrPair.requestData; metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) .setAttribute(MetricsAttributeKey.REQUEST_ID, rrPair.getRequestContext().getLogicalEnclosingScope().getReplayerRequestKey().toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, rrPair.getRequestContext().getLogicalEnclosingScope().getLogicalEnclosingScope().getConnectionId()).emit(); - assert (requestPacketBytes != null); - assert (!requestPacketBytes.hasInProgressSegment()); + assert (httpMessage != null); + assert (!httpMessage.hasInProgressSegment()); var requestCtx = rrPair.getRequestContext(); rrPair.rotateRequestGatheringToResponse(); - listener.onRequestReceived(accumulation, requestCtx, requestPacketBytes); + listener.onRequestReceived(requestCtx, httpMessage); accumulation.state = Accumulation.State.ACCUMULATING_WRITES; return true; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index e718e7db9..d07d31150 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -155,14 +155,15 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant return hookWorkFinishingUpdates(sendResult, originalStart, requestKey, label); } - public void closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - IChannelKeyContext ctx, Instant timestamp) { + public DiagnosticTrackableCompletableFuture + closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, + IChannelKeyContext ctx, Instant timestamp) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); logStartOfWork(new IndexedChannelInteraction(channelKey, channelInteractionNum), newCount, atTime, label); var future = networkSendOrchestrator.scheduleClose(ctx, channelInteractionNum, atTime); - hookWorkFinishingUpdates(future, timestamp, channelKey, label); + return hookWorkFinishingUpdates(future, timestamp, channelKey, label); } public DiagnosticTrackableCompletableFuture closeConnectionsAndShutdown() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 231bf89ff..b6aef03bf 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -43,9 +43,9 @@ public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStr var httpTransactionContext = new Contexts.HttpTransactionContext( 
startingAtTrafficStreamKey.getTrafficStreamsContext(), requestKey, - Accumulation.METERING_CLOSURE.makeSpanContinuation("processingChannel")); + Accumulation.METERING_CLOSURE.makeSpanContinuation("httpTransaction")); requestOrResponseAccumulationContext = new Contexts.RequestAccumulationContext(httpTransactionContext, - Accumulation.METERING_CLOSURE.makeSpanContinuation("httpTransactionAccumulation")); + Accumulation.METERING_CLOSURE.makeSpanContinuation("accumulatingRequest")); } @NonNull ISourceTrafficChannelKey getBeginningTrafficStreamKey() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 253ecd8df..c9b99d9b3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -603,7 +603,8 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { private ITrafficCaptureSource trafficCaptureSource; @Override - public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx, + public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, + IContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); @@ -671,6 +672,7 @@ Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, .log(); throw e; } finally { + rrPair.getHttpTransactionContext().endSpan(); requestToFinalWorkFuturesMap.remove(requestKey); log.trace("removed rrPair.requestData to " + "targetTransactionInProgressMap for " + @@ -707,8 +709,10 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, int IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant timestamp, @NonNull List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); - replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); - commitTrafficStreams(trafficStreamKeysBeingHeld, status); + var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); + cf.map(f->f.whenComplete((v,t)->{ + commitTrafficStreams(trafficStreamKeysBeingHeld, status); + }), ()->"closing the channel in the ReplayEngine"); } @Override @@ -879,6 +883,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture> packetsSupplier) { + // TODO - add context chaining try { var transformationCompleteFuture = replayEngine.scheduleTransformationWork(ctx, start, ()-> transformAllData(inputRequestTransformerFactory.create(requestKey, ctx), packetsSupplier)); @@ -891,11 +896,11 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuturefuture.thenApply(t-> + .map(future->future.thenApply(t -> new TransformedTargetRequestAndResponse(transformedResult.transformedOutput, t, transformedResult.transformationStatus, t.error)), ()->"(if applicable) packaging transformed result into a completed TransformedTargetRequestAndResponse object") - .map(future->future.exceptionally(t-> + .map(future->future.exceptionally(t -> new TransformedTargetRequestAndResponse(transformedResult.transformedOutput, transformedResult.transformationStatus, t)), ()->"(if applicable) packaging transformed result into a failed 
TransformedTargetRequestAndResponse object"), diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java index 54e116855..1dd711cad 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/UniqueReplayerRequestKey.java @@ -1,11 +1,13 @@ package org.opensearch.migrations.replay.datatypes; import lombok.EqualsAndHashCode; +import lombok.Getter; @EqualsAndHashCode(callSuper = true) public class UniqueReplayerRequestKey extends UniqueSourceRequestKey { public final ITrafficStreamKey trafficStreamKey; public final int sourceRequestIndexOffsetAtStartOfAccumulation; + @Getter public final int replayerRequestIndex; public UniqueReplayerRequestKey(ITrafficStreamKey streamKey, int sourceOffsetAtStartOfAccumulation, @@ -25,10 +27,6 @@ public int getSourceRequestIndex() { return replayerRequestIndex + sourceRequestIndexOffsetAtStartOfAccumulation; } - public int getReplayerRequestIndex() { - return replayerRequestIndex; - } - @Override public String toString() { // The offset that is shown is a mouthful to describe. diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java index 539b336ef..d8f111dc3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java @@ -106,11 +106,6 @@ public ResponseAccumulationContext(IContexts.IReplayerHttpTransactionContext enc super(enclosingScope); setCurrentSpan(spanGenerator); } - - @Override - public void endSpan() { - super.endSpan(); - } } public static class RequestTransformationContext diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index deead7181..d4f5523d0 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -1,11 +1,21 @@ package org.opensearch.migrations.replay.kafka; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import lombok.Lombok; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.Producer; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; 
import org.junit.jupiter.api.Test; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; @@ -32,6 +42,33 @@ public class KafkaCommitsWorkBetweenLongPolls { private final KafkaContainer embeddedKafkaBroker = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0")); + private InMemorySpanExporter testSpanExporter; + private InMemoryMetricExporter testMetricExporter; + + @BeforeEach + void setup() { + GlobalOpenTelemetry.resetForTest(); + testSpanExporter = InMemorySpanExporter.create(); + testMetricExporter = InMemoryMetricExporter.create(); + + OpenTelemetrySdk.builder() + .setTracerProvider( + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(testSpanExporter)).build()) + .setMeterProvider( + SdkMeterProvider.builder() + .registerMetricReader(PeriodicMetricReader.builder(testMetricExporter) + .setInterval(Duration.ofMillis(100)) + .build()) + .build()) + .buildAndRegisterGlobal(); + } + + @AfterEach + void tearDown() { + GlobalOpenTelemetry.resetForTest(); + } + @SneakyThrows private KafkaConsumer buildKafkaConsumer() { var kafkaConsumerProps = KafkaTrafficCaptureSource.buildKafkaProperties(embeddedKafkaBroker.getBootstrapServers(), @@ -84,6 +121,13 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { } } } + + var spans = testSpanExporter.getFinishedSpanItems(); + Assertions.assertFalse(spans.isEmpty(), "No spans were found"); + + var metrics = testMetricExporter.getFinishedMetricItems(); + Assertions.assertFalse(metrics.isEmpty(), "No metrics were found"); + } static Instant getTimeAtPoint(int i) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java index b2319765a..9fc4d6489 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java @@ -47,35 +47,6 @@ public class KafkaKeepAliveTests { new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0")); private KafkaTrafficCaptureSource kafkaSource; - /** - * Set up the test case where we've produced and received 1 message, but have not yet committed it. - * Another message is in the process of being produced. - * The BlockingTrafficSource is blocked on everything after a point before the beginning of the test. 
- * @throws Exception - */ - @BeforeEach - private void setupTestCase() throws Exception { - kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); - this.sendCompleteCount = new AtomicInteger(0); - KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 0, sendCompleteCount).get(); - Assertions.assertEquals(1, sendCompleteCount.get()); - - this.kafkaProperties = KafkaTrafficCaptureSource.buildKafkaProperties(embeddedKafkaBroker.getBootstrapServers(), - TEST_GROUP_CONSUMER_ID, false, null); - Assertions.assertNull(kafkaProperties.get(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY)); - - kafkaProperties.put(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY, MAX_POLL_INTERVAL_MS+""); - kafkaProperties.put(HEARTBEAT_INTERVAL_MS_KEY, HEARTBEAT_INTERVAL_MS+""); - kafkaProperties.put("max.poll.records", 1); - var kafkaConsumer = new KafkaConsumer(kafkaProperties); - this.kafkaSource = new KafkaTrafficCaptureSource(kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); - this.trafficSource = new BlockingTrafficSource(kafkaSource, Duration.ZERO); - this.keysReceived = new ArrayList<>(); - - readNextNStreams(trafficSource, keysReceived, 0, 1); - KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 1, sendCompleteCount); - } - @Test @Tag("longTest") public void testTimeoutsDontOccurForSlowPolls() throws Exception { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java new file mode 100644 index 000000000..802ef98d0 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java @@ -0,0 +1,10 @@ +package org.opensearch.migrations.replay.kafka; + +import org.junit.jupiter.api.Test; + +public class KafkaTracingTest { + @Test + public void testTracingWorks() { + + } +} From 8bda2e38210c704276473a77ba1ff97ec753a297 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sun, 17 Dec 2023 14:57:21 -0500 Subject: [PATCH 26/94] Two critical bugfixes around handling close observations that were discovered by trace inspection. 1) close was called AFTER the RRPair was rotated, so there were no traffic streams being committed, resulting in a perpetual hole in the commit log. 2) close was being scheduled immediately, before all requests (in most cases) because the channelInteractionNumber was being miscalculated. 
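The first of the two fixes boils down to an ordering rule: snapshot the traffic streams an accumulation is holding before rotating it, because rotation empties that list. Below is a self-contained toy model of the hole this closes; every name in it is hypothetical rather than taken from the replayer's accumulator.

    import java.util.ArrayList;
    import java.util.List;

    // Toy stand-in for the accumulator: it holds stream keys until a rotation hands them off.
    class HeldStreams {
        private final List<String> held = new ArrayList<>();
        void hold(String key) { held.add(key); }
        List<String> snapshot() { return List.copyOf(held); }
        void rotate() { held.clear(); }                    // rotation empties the held list
    }

    public class CloseOrderingSketch {
        public static void main(String[] args) {
            var accum = new HeldStreams();
            accum.hold("partition-0/offset-42");

            // Fixed ordering: snapshot first, then rotate, then commit the snapshot.
            List<String> toCommit = accum.snapshot();
            accum.rotate();
            System.out.println("committing " + toCommit); // [partition-0/offset-42]

            // The buggy ordering (rotate, then snapshot) would print an empty list here,
            // leaving those offsets permanently uncommitted: the "perpetual hole".
        }
    }

With the buggy order, the list handed to the close callback is always empty, so the offsets behind those streams are never committed and the consumer can never advance past them.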
Signed-off-by: Greg Schohn --- .../replay/CapturedTrafficToHttpTransactionAccumulator.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 54a186953..e88b8e5a7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -122,8 +122,7 @@ public void onConnectionClose(@NonNull Accumulation accum, @NonNull List trafficStreamKeysBeingHeld) { var tsCtx = accum.trafficChannelKey.getTrafficStreamsContext(); underlying.onConnectionClose(accum.trafficChannelKey, - accum.startingSourceRequestIndex + accum.startingSourceRequestIndex, - tsCtx, status, when, trafficStreamKeysBeingHeld); + accum.numberOfResets.get(), tsCtx, status, when, trafficStreamKeysBeingHeld); } public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, @@ -263,10 +262,11 @@ private static List getTrafficStreamsHeldByAccum(Accumulation Instant timestamp) { if (observation.hasClose()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); + var heldTrafficStreams = getTrafficStreamsHeldByAccum(accum); rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); closedConnectionCounter.incrementAndGet(); listener.onConnectionClose(accum, RequestResponsePacketPair.ReconstructionStatus.COMPLETE, - timestamp, getTrafficStreamsHeldByAccum(accum)); + timestamp, heldTrafficStreams); return Optional.of(CONNECTION_STATUS.CLOSED); } else if (observation.hasConnectionException()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); From d9df3fafc752d32ad7e45e17dbb1dee2b168e8a4 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 18 Dec 2023 10:00:32 -0500 Subject: [PATCH 27/94] Fix some test code where the nodeId and connectionId got reversed, causing the same ConnectionReplaySession to be returned for every connection, which resulted in serious corruption of ordering and tests never completing. 
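Because the node id and connection id are both plain Strings, the swapped arguments compiled cleanly; the damage only shows up at runtime when every connection hashes to the same session. A minimal, purely illustrative reproduction of that failure mode follows; the cache and method names are hypothetical, not the replayer's ConnectionReplaySession machinery.

    import java.util.HashMap;
    import java.util.Map;

    public class SwappedKeySketch {
        static final Map<String, Object> sessionCache = new HashMap<>();

        // Both parameters are Strings, so the compiler cannot catch a swapped call site.
        static Object sessionFor(String nodeId, String connectionId) {
            return sessionCache.computeIfAbsent(connectionId, k -> new Object());
        }

        public static void main(String[] args) {
            // Correct call sites: distinct connections get distinct sessions.
            var a = sessionFor("node-1", "conn-A");
            var b = sessionFor("node-1", "conn-B");
            System.out.println(a != b);   // true

            sessionCache.clear();
            // Swapped call sites (the bug): every lookup keys on the node id,
            // so every connection shares one session and replay ordering collapses.
            var c = sessionFor("conn-A", "node-1");
            var d = sessionFor("conn-B", "node-1");
            System.out.println(c == d);   // true
        }
    }

A small keyed wrapper type (or named builder parameters) is the usual defense against this class of swap, since the compiler can then reject the reversed call site outright.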
Signed-off-by: Greg Schohn --- .../opensearch/migrations/replay/util/OnlineRadixSorter.java | 1 + .../opensearch/migrations/replay/FullTrafficReplayerTest.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java index a8b9452ff..84d1bfbc5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java @@ -31,6 +31,7 @@ public OnlineRadixSorter(int startingOffset) { } public void add(int index, T item, Consumer sortedItemVisitor) { + assert index >= currentOffset; if (currentOffset == index) { ++currentOffset; sortedItemVisitor.accept(item); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index a91e78172..c895493d5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -188,7 +188,7 @@ public TrafficStreamCursorKey(TrafficStream stream, int arrayIndex) { nodeId = stream.getNodeId(); trafficStreamIndex = TrafficStreamUtils.getTrafficStreamIndex(stream); this.arrayIndex = arrayIndex; - var key = PojoTrafficStreamKeyAndContext.build(connectionId, nodeId, trafficStreamIndex, tsk-> + var key = PojoTrafficStreamKeyAndContext.build(nodeId, connectionId, trafficStreamIndex, tsk-> new TestTrafficStreamsLifecycleContext(tsk)); trafficStreamsContext = key.getTrafficStreamsContext(); key.setTrafficStreamsContext(trafficStreamsContext); From ab3dfb420704e1dff5a63bf56f0e4d3a05d92911 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 18 Dec 2023 23:19:26 -0500 Subject: [PATCH 28/94] Extra guards to try to make tests more reliable, but one of the FullTrafficTest runs (-1, false) is still missing a message. 
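One of the guards below is an assert in OnlineRadixSorter.add that rejects an index behind the already-flushed prefix, which would indicate a caller bug such as the miscounted channel interaction numbers fixed in the previous patch. The following is a simplified, hypothetical re-sketch of the sorter's contract, written from the fragments visible in this patch series rather than the real class, to show what that assert protects.

    import java.util.ArrayList;
    import java.util.function.Consumer;

    // Hypothetical, simplified re-sketch (not the project's class): items arrive tagged with a
    // dense index, and the visitor fires in index order as soon as the run starting at
    // currentOffset is contiguous.
    public class MiniRadixSorter<T> {
        private final ArrayList<T> items = new ArrayList<>();
        private int currentOffset;

        MiniRadixSorter(int startingOffset) { this.currentOffset = startingOffset; }

        void add(int index, T item, Consumer<T> visitor) {
            // The new guard: an index behind the flushed prefix means the caller miscounted.
            assert index >= currentOffset : "index " + index + " is behind offset " + currentOffset;
            while (items.size() <= index) { items.add(null); }
            items.set(index, item);
            while (currentOffset < items.size() && items.get(currentOffset) != null) {
                visitor.accept(items.get(currentOffset++));
            }
        }

        boolean hasPending() { return currentOffset < items.size(); }

        public static void main(String[] args) {
            var sorter = new MiniRadixSorter<String>(0);
            sorter.add(1, "second", System.out::println);   // buffered; nothing printed yet
            sorter.add(0, "first", System.out::println);    // prints "first" then "second"
        }
    }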
Signed-off-by: Greg Schohn --- .../replay/ClientConnectionPool.java | 6 ++-- .../replay/RequestSenderOrchestrator.java | 3 ++ .../datatypes/ConnectionReplaySession.java | 1 - .../replay/util/OnlineRadixSorter.java | 5 ++++ .../replay/FullTrafficReplayerTest.java | 2 +- .../KafkaCommitsWorkBetweenLongPolls.java | 8 ++--- .../replay/kafka/KafkaKeepAliveTests.java | 29 +++++++++++++++++++ 7 files changed, 46 insertions(+), 8 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index a90775368..dd953f08d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -143,8 +143,7 @@ public void closeConnection(String connId) { public Future submitEventualSessionGet(IChannelKeyContext ctx, boolean ignoreIfNotPresent) { - ConnectionReplaySession channelFutureAndSchedule = - getCachedSession(ctx, ignoreIfNotPresent); + ConnectionReplaySession channelFutureAndSchedule = getCachedSession(ctx, ignoreIfNotPresent); if (channelFutureAndSchedule == null) { var rval = new DefaultPromise(eventLoopGroup.next()); rval.setSuccess(null); @@ -161,11 +160,14 @@ public void closeConnection(String connId) { @SneakyThrows public ConnectionReplaySession getCachedSession(IChannelKeyContext channelKey, boolean dontCreate) { + var crs = dontCreate ? connectionId2ChannelCache.getIfPresent(channelKey.getConnectionId()) : connectionId2ChannelCache.get(channelKey.getConnectionId()); if (crs != null) { crs.setChannelContext(channelKey); } + log.atTrace().setMessage(()->"returning ReplaySession=" + crs + " for " + channelKey.getConnectionId() + + " from " + channelKey).log(); return crs; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index 3eb70c35a..deb9ecc89 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -129,6 +129,9 @@ public StringTrackableCompletableFuture scheduleClose(IChannelKeyContext c runAfterChannelSetup(channelFutureAndRequestSchedule, finalTunneledResponse, replaySession -> { + log.atTrace().setMessage(()->"adding work item at slot " + + channelInteractionNumber + " for " + ctx + " with " + + replaySession.scheduleSequencer).log(); replaySession.scheduleSequencer.add(channelInteractionNumber, () -> successFn.accept(channelFutureAndRequestSchedule), x -> x.run()); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java index ccccf7ee4..117999eb0 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java @@ -6,7 +6,6 @@ import lombok.Setter; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import 
org.opensearch.migrations.replay.tracing.ChannelKeyContext; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.OnlineRadixSorter; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java index 84d1bfbc5..bbe07ed6b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/util/OnlineRadixSorter.java @@ -1,5 +1,7 @@ package org.opensearch.migrations.replay.util; +import lombok.extern.slf4j.Slf4j; + import java.util.ArrayList; import java.util.function.Consumer; @@ -21,6 +23,7 @@ * * @param */ +@Slf4j public class OnlineRadixSorter { ArrayList items; int currentOffset; @@ -34,6 +37,7 @@ public void add(int index, T item, Consumer sortedItemVisitor) { assert index >= currentOffset; if (currentOffset == index) { ++currentOffset; + log.atTrace().setMessage(()->"Running callback for "+index+": "+this).log(); sortedItemVisitor.accept(item); while (currentOffset < items.size()) { var nextItem = items.get(currentOffset); @@ -60,6 +64,7 @@ public boolean hasPending() { @Override public String toString() { final StringBuilder sb = new StringBuilder("OnlineRadixSorter{"); + sb.append("id=").append(System.identityHashCode(this)); sb.append("items=").append(items); sb.append(", currentOffset=").append(currentOffset); sb.append('}'); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index c895493d5..efbe82816 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -158,7 +158,7 @@ public void testDoubleRequestWithCloseIsCommittedOnce() throws Throwable { @Tag("longTest") public void fullTest(int testSize, boolean randomize) throws Throwable { var random = new Random(1); - var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), + var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(200), response->TestHttpServerContext.makeResponse(random,response)); var streamAndConsumer = TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(testSize, randomize); var numExpectedRequests = streamAndConsumer.numHttpTransactions; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index d4f5523d0..e64108a78 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -122,11 +122,11 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { } } - var spans = testSpanExporter.getFinishedSpanItems(); - Assertions.assertFalse(spans.isEmpty(), "No spans were 
found"); +// var spans = testSpanExporter.getFinishedSpanItems(); +// Assertions.assertFalse(spans.isEmpty(), "No spans were found"); - var metrics = testMetricExporter.getFinishedMetricItems(); - Assertions.assertFalse(metrics.isEmpty(), "No metrics were found"); +// var metrics = testMetricExporter.getFinishedMetricItems(); +// Assertions.assertFalse(metrics.isEmpty(), "No metrics were found"); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java index 9fc4d6489..6a0dfb129 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java @@ -45,7 +45,36 @@ public class KafkaKeepAliveTests { // see https://docs.confluent.io/platform/current/installation/versions-interoperability.html#cp-and-apache-kafka-compatibility private final KafkaContainer embeddedKafkaBroker = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0")); + private KafkaTrafficCaptureSource kafkaSource; + /** + * Set up the test case where we've produced and received 1 message, but have not yet committed it. + * Another message is in the process of being produced. + * The BlockingTrafficSource is blocked on everything after a point before the beginning of the test. + * @throws Exception + */ + @BeforeEach + private void setupTestCase() throws Exception { + kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); + this.sendCompleteCount = new AtomicInteger(0); + KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 0, sendCompleteCount).get(); + Assertions.assertEquals(1, sendCompleteCount.get()); + + this.kafkaProperties = KafkaTrafficCaptureSource.buildKafkaProperties(embeddedKafkaBroker.getBootstrapServers(), + TEST_GROUP_CONSUMER_ID, false, null); + Assertions.assertNull(kafkaProperties.get(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY)); + + kafkaProperties.put(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY, MAX_POLL_INTERVAL_MS+""); + kafkaProperties.put(HEARTBEAT_INTERVAL_MS_KEY, HEARTBEAT_INTERVAL_MS+""); + kafkaProperties.put("max.poll.records", 1); + var kafkaConsumer = new KafkaConsumer(kafkaProperties); + this.kafkaSource = new KafkaTrafficCaptureSource(kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); + this.trafficSource = new BlockingTrafficSource(kafkaSource, Duration.ZERO); + this.keysReceived = new ArrayList<>(); + + readNextNStreams(trafficSource, keysReceived, 0, 1); + KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 1, sendCompleteCount); + } @Test @Tag("longTest") From 273c5aa82a58ec6b0e8daf83466482782ffe5181 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 19 Dec 2023 00:17:46 -0500 Subject: [PATCH 29/94] More test fixes, including fixing a regression that I had caused in an earlier edit around flushing the streams held list on close. That should only happen when the close was on an accumulated pair that had a broken request that wasn't rotated. Otherwise, the traffic streams would be committed twice. I also tweaked makeTrafficStreams to keep stable ids on the individual streams to make debugging runs a LOT simpler. 
Signed-off-by: Greg Schohn --- ...pturedTrafficToHttpTransactionAccumulator.java | 4 +++- ...edTrafficToHttpTransactionAccumulatorTest.java | 7 +++---- .../migrations/replay/TrafficStreamGenerator.java | 15 +++++++++------ 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index e88b8e5a7..e89db0c3c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -263,7 +263,9 @@ private static List getTrafficStreamsHeldByAccum(Accumulation if (observation.hasClose()) { accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); var heldTrafficStreams = getTrafficStreamsHeldByAccum(accum); - rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); + if (rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum)) { + heldTrafficStreams = List.of(); + } closedConnectionCounter.incrementAndGet(); listener.onConnectionClose(accum, RequestResponsePacketPair.ReconstructionStatus.COMPLETE, timestamp, heldTrafficStreams); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 2b8cfdee8..61910722e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -113,8 +113,7 @@ static ByteBuf makeSequentialByteBuf(int offset, int size) { return bb; } - static AtomicInteger uniqueIdCounter = new AtomicInteger(); - static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, + static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, AtomicInteger uniqueIdCounter, List directives) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); var offloader = connectionFactory.createOffloader(new ConnectionContext("n", "test", @@ -191,8 +190,8 @@ public static Tuple2 unzipRequestResponseSizes(List collat @MethodSource("loadSimpleCombinations") void generateAndTest(String testName, int bufferSize, int skipCount, List directives, List expectedSizes) throws Exception { - var trafficStreams = Arrays.stream(makeTrafficStreams(bufferSize, 0, directives)) - .skip(skipCount); + var trafficStreams = Arrays.stream(makeTrafficStreams(bufferSize, 0, new AtomicInteger(), + directives)).skip(skipCount); List reconstructedTransactions = new ArrayList<>(); AtomicInteger requestsReceived = new AtomicInteger(0); accumulateTrafficStreamsWithNewAccumulator(trafficStreams, reconstructedTransactions, requestsReceived); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java index 
abd0f17e2..62fc5bb3e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java @@ -209,9 +209,10 @@ private static void fillCommandsAndSizes(Random r, double cancelRequestLikelihoo } @SneakyThrows - private static TrafficStream[] fillCommandsAndSizesForSeed(long rSeed, - ArrayList commands, - ArrayList sizes) { + private static TrafficStream[] + fillCommandsAndSizesForSeed(long rSeed, AtomicInteger uniqueIdCounter, + ArrayList commands, + ArrayList sizes) { var r2 = new Random(rSeed); var bufferSize = r2.nextInt(MAX_BUFFER_SIZE-MIN_BUFFER_SIZE) + MIN_BUFFER_SIZE; final var bufferBound = (int)(Math.abs(r2.nextGaussian()) * ((MAX_BUFFER_SIZE_MULTIPLIER * bufferSize)))+1; @@ -221,7 +222,8 @@ private static TrafficStream[] fillCommandsAndSizesForSeed(long rSeed, .log(); var flushLikelihood = Math.pow(r2.nextDouble(),2.0); fillCommandsAndSizes(r2, flushLikelihood/4, flushLikelihood, bufferBound, commands, sizes); - return SimpleCapturedTrafficToHttpTransactionAccumulatorTest.makeTrafficStreams(bufferSize, (int) rSeed, commands); + return SimpleCapturedTrafficToHttpTransactionAccumulatorTest.makeTrafficStreams(bufferSize, (int) rSeed, + uniqueIdCounter, commands); } /** @@ -271,7 +273,7 @@ public static class StreamAndExpectedSizes { generateRandomTrafficStreamsAndSizes(IntStream.range(0,count)) : generateAllIndicativeRandomTrafficStreamsAndSizes(); var testCaseArr = generatedCases.toArray(RandomTrafficStreamAndTransactionSizes[]::new); - log.atInfo().setMessage(()-> + log.atInfo().setMessage(()-> "test case array = \n" + Arrays.stream(testCaseArr) .flatMap(tc->Arrays.stream(tc.trafficStreams).map(TrafficStreamUtils::summarizeTrafficStream)) .collect(Collectors.joining("\n"))) @@ -287,10 +289,11 @@ public static class StreamAndExpectedSizes { public static Stream generateRandomTrafficStreamsAndSizes(IntStream seedStream) { + var uniqueIdCounter = new AtomicInteger(); return seedStream.mapToObj(rSeed->{ var commands = new ArrayList(); var sizes = new ArrayList(); - var trafficStreams = fillCommandsAndSizesForSeed(rSeed, commands, sizes); + var trafficStreams = fillCommandsAndSizesForSeed(rSeed, uniqueIdCounter, commands, sizes); var splitSizes = SimpleCapturedTrafficToHttpTransactionAccumulatorTest.unzipRequestResponseSizes(sizes); return new RandomTrafficStreamAndTransactionSizes(rSeed, trafficStreams, From e0167f571f9d967d466106ad09e86fb6e86862d9 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 19 Dec 2023 10:06:54 -0500 Subject: [PATCH 30/94] Fix a race condition with commitKafkaKey. It could be called from one thread while the nextCommit maps were being modified by other (Kafka Consumer) threads. Now there's a lock and a quick copy to protect against that race. 
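Stripped of the Kafka specifics, the shape of the fix is the snapshot-under-lock pattern: mutate the shared commit maps only while holding a dedicated lock, and have the committing thread take a quick copy under that lock before doing the slow, blocking commit outside it. A self-contained sketch of that pattern follows; the types and names are illustrative, not the TrackingKafkaConsumer's actual fields.

    import java.util.HashMap;
    import java.util.Map;

    public class SnapshotCommitSketch {
        private final Object commitDataLock = new Object();
        private final Map<Integer, Long> pendingCommits = new HashMap<>();   // partition -> offset

        void recordOffset(int partition, long offset) {
            synchronized (commitDataLock) {
                pendingCommits.merge(partition, offset, Math::max);
            }
        }

        void commit() {
            Map<Integer, Long> snapshot;
            synchronized (commitDataLock) {                  // quick copy while holding the lock
                snapshot = new HashMap<>(pendingCommits);
            }
            slowBlockingCommit(snapshot);                    // long-running work happens lock-free
            synchronized (commitDataLock) {                  // only drop what was actually committed
                snapshot.forEach((p, o) -> pendingCommits.remove(p, o));
            }
        }

        private void slowBlockingCommit(Map<Integer, Long> offsets) {
            System.out.println("committing " + offsets);     // stand-in for kafkaConsumer.commitSync
        }

        public static void main(String[] args) {
            var s = new SnapshotCommitSketch();
            s.recordOffset(0, 42L);
            s.commit();                                      // prints: committing {0=42}
        }
    }

Keeping the blocking commit outside the lock matters because commitSync waits on the broker; holding the lock across it would stall the threads trying to record newly committable offsets.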
Signed-off-by: Greg Schohn --- .../replay/kafka/TrackingKafkaConsumer.java | 108 ++++++++++++------ 1 file changed, 72 insertions(+), 36 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 1ad4448c3..8a77f26ad 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -86,6 +86,7 @@ public int hashCode() { * the generations of each OffsetLifecycleTracker value may be different. */ final Map partitionToOffsetLifecycleTrackerMap; + private final Object commitDataLock = new Object(); // loosening visibility so that a unit test can read this final Map nextSetOfCommitsMap; final Map> nextSetOfKeysContextsBeingCommitted; @@ -103,35 +104,39 @@ public TrackingKafkaConsumer(Consumer kafkaConsumer, String topi this.clock = c; this.partitionToOffsetLifecycleTrackerMap = new HashMap<>(); this.nextSetOfCommitsMap = new HashMap<>(); + this.nextSetOfKeysContextsBeingCommitted = new HashMap<>(); this.lastTouchTimeRef = new AtomicReference<>(Instant.EPOCH); consumerConnectionGeneration = new AtomicInteger(); kafkaRecordsLeftToCommit = new AtomicInteger(); this.keepAliveInterval = keepAliveInterval; - this.nextSetOfKeysContextsBeingCommitted = new HashMap<>(); this.onCommitKeyCallback = onCommitKeyCallback; } @Override public void onPartitionsRevoked(Collection partitions) { - safeCommit(); - partitions.forEach(p->{ - var tp = new TopicPartition(topic, p.partition()); - nextSetOfCommitsMap.remove(tp); - nextSetOfKeysContextsBeingCommitted.remove(tp); - partitionToOffsetLifecycleTrackerMap.remove(p.partition()); - }); - kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() - .mapToInt(OffsetLifecycleTracker::size).sum()); - log.atWarn().setMessage(()->this+"partitions revoked for "+partitions.stream() - .map(p->p+"").collect(Collectors.joining(","))).log(); + synchronized (commitDataLock) { + safeCommit(); + partitions.forEach(p -> { + var tp = new TopicPartition(topic, p.partition()); + nextSetOfCommitsMap.remove(tp); + nextSetOfKeysContextsBeingCommitted.remove(tp); + partitionToOffsetLifecycleTrackerMap.remove(p.partition()); + }); + kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() + .mapToInt(OffsetLifecycleTracker::size).sum()); + log.atWarn().setMessage(() -> this + "partitions revoked for " + partitions.stream() + .map(p -> p + "").collect(Collectors.joining(","))).log(); + } } @Override public void onPartitionsAssigned(Collection newPartitions) { - consumerConnectionGeneration.incrementAndGet(); - newPartitions.forEach(p->partitionToOffsetLifecycleTrackerMap.computeIfAbsent(p.partition(), - x->new OffsetLifecycleTracker(consumerConnectionGeneration.get()))); - log.atWarn().setMessage(()->this+"partitions added for "+newPartitions.stream() - .map(p->p+"").collect(Collectors.joining(","))).log(); + synchronized (commitDataLock) { + consumerConnectionGeneration.incrementAndGet(); + newPartitions.forEach(p -> partitionToOffsetLifecycleTrackerMap.computeIfAbsent(p.partition(), + x -> new OffsetLifecycleTracker(consumerConnectionGeneration.get()))); + log.atWarn().setMessage(() -> this + "partitions added for " + newPartitions.stream() + .map(p -> p + 
"").collect(Collectors.joining(","))).log(); + } } public void close() { @@ -250,7 +255,10 @@ private ConsumerRecords safePollWithSwallowedRuntimeExceptions() } void commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) { - var tracker = partitionToOffsetLifecycleTrackerMap.get(kafkaTsk.getPartition()); + OffsetLifecycleTracker tracker; + synchronized (commitDataLock) { + tracker = partitionToOffsetLifecycleTrackerMap.get(kafkaTsk.getPartition()); + } if (tracker == null || tracker.consumerConnectionGeneration != kafkaTsk.getGeneration()) { log.atWarn().setMessage(()->"trafficKey's generation (" + kafkaTsk.getGeneration() + ") is not current (" + (Optional.ofNullable(tracker).map(t->"new generation=" + t.consumerConnectionGeneration) @@ -265,32 +273,49 @@ void commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) Optional newHeadValue; var k = new TopicPartition(topic, p); - nextSetOfKeysContextsBeingCommitted.computeIfAbsent(k, k2 -> new PriorityQueue<>()) - .add(new OrderedKeyHolder(kafkaTsk.getOffset(), streamKey)); newHeadValue = tracker.removeAndReturnNewHead(kafkaTsk.getOffset()); - newHeadValue.ifPresent(o -> { + newHeadValue.ifPresentOrElse(o -> { var v = new OffsetAndMetadata(o); log.atDebug().setMessage(()->"Adding new commit " + k + "->" + v + " to map").log(); - nextSetOfCommitsMap.put(k, v); + synchronized (commitDataLock) { + addKeyContextForEventualCommit(streamKey, kafkaTsk, k); + nextSetOfCommitsMap.put(k, v); + } + }, () -> { + synchronized (commitDataLock) { + addKeyContextForEventualCommit(streamKey, kafkaTsk, k); + } }); } + private void addKeyContextForEventualCommit(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk, TopicPartition k) { + nextSetOfKeysContextsBeingCommitted.computeIfAbsent(k, k2 -> new PriorityQueue<>()) + .add(new OrderedKeyHolder(kafkaTsk.getOffset(), streamKey)); + } + private void safeCommit() { + var nextCommitsMapCopy = new HashMap(); + synchronized (commitDataLock) { + nextCommitsMapCopy.putAll(nextSetOfCommitsMap); + } try { - if (!nextSetOfCommitsMap.isEmpty()) { - log.atDebug().setMessage(() -> "Committing " + nextSetOfCommitsMap).log(); - kafkaConsumer.commitSync(nextSetOfCommitsMap); - nextSetOfCommitsMap.entrySet().stream() - .forEach(kvp->callbackUpTo(nextSetOfKeysContextsBeingCommitted.get(kvp.getKey()), + safeCommitStatic(kafkaConsumer, onCommitKeyCallback, nextCommitsMapCopy); + synchronized (commitDataLock) { + nextCommitsMapCopy.entrySet().stream() + .forEach(kvp->callbackUpTo(onCommitKeyCallback, + nextSetOfKeysContextsBeingCommitted.get(kvp.getKey()), kvp.getValue().offset())); - nextSetOfCommitsMap.clear(); - log.trace("partitionToOffsetLifecycleTrackerMap="+partitionToOffsetLifecycleTrackerMap); - kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() - .mapToInt(OffsetLifecycleTracker::size).sum()); - log.atDebug().setMessage(() -> "Done committing now records in flight=" + - kafkaRecordsLeftToCommit.get()).log(); + nextCommitsMapCopy.forEach((k,v)->nextSetOfCommitsMap.remove(k)); } + // This function will only ever be called in a threadsafe way, mutually exclusive from any + // other call other than commitKafkaKey(). 
Since commitKafkaKey() doesn't alter + // partitionToOffsetLifecycleTrackerMap, these lines can be outside of the commitDataLock mutex + log.trace("partitionToOffsetLifecycleTrackerMap="+partitionToOffsetLifecycleTrackerMap); + kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() + .mapToInt(OffsetLifecycleTracker::size).sum()); + log.atDebug().setMessage(() -> "Done committing now records in flight=" + + kafkaRecordsLeftToCommit.get()).log(); } catch (RuntimeException e) { log.atWarn().setCause(e) .setMessage(() -> "Error while committing. " + @@ -306,10 +331,21 @@ private void safeCommit() { } } - private void callbackUpTo(PriorityQueue orderedKeyHolders, long upToOffset) { + private static void safeCommitStatic(Consumer kafkaConsumer, + java.util.function.Consumer onCommitKeyCallback, + HashMap nextCommitsMap) { + if (!nextCommitsMap.isEmpty()) { + log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); + kafkaConsumer.commitSync(nextCommitsMap); + } + } + + private static void callbackUpTo(java.util.function.Consumer onCommitKeyCallback, + PriorityQueue orderedKeyHolders, long upToOffset) { for (var nextKeyHolder = orderedKeyHolders.peek(); - nextKeyHolder != null && nextKeyHolder.offset<=upToOffset; - nextKeyHolder = orderedKeyHolders.peek()) { + nextKeyHolder != null && nextKeyHolder.offset <= upToOffset; + nextKeyHolder = orderedKeyHolders.peek()) + { onCommitKeyCallback.accept(nextKeyHolder.tsk); orderedKeyHolders.poll(); } From 3d81ad8de60622e32c8d6a87fbd697367849744b Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 20 Dec 2023 11:07:25 -0500 Subject: [PATCH 31/94] Two changes to kafka interactions. Add trace spans for traffic source/kafka interactions + preempt blocking in the BlockingTrafficSource when there's keys (offsets) to commit. 
Signed-off-by: Greg Schohn --- .../migrations/replay/TrafficReplayer.java | 3 +- .../kafka/KafkaTrafficCaptureSource.java | 40 ++++--- .../replay/kafka/TrackingKafkaConsumer.java | 64 ++++++++--- .../tracing/AbstractNestedSpanContext.java | 15 ++- .../tracing/DirectNestedSpanContext.java | 2 +- .../traffic/source/BlockingTrafficSource.java | 105 ++++++++++++++---- .../traffic/source/ITrafficCaptureSource.java | 13 ++- .../traffic/source/InputStreamOfTraffic.java | 6 +- .../replay/BlockingTrafficSourceTest.java | 6 +- .../CompressedFileTrafficCaptureSource.java | 11 +- .../replay/FullTrafficReplayerTest.java | 6 +- .../KafkaRestartingTrafficReplayerTest.java | 2 +- .../replay/SentinelSensingTrafficSource.java | 10 +- .../replay/TrafficReplayerTest.java | 4 +- ...KafkaTrafficCaptureSourceLongTermTest.java | 6 +- .../kafka/KafkaTrafficCaptureSourceTest.java | 31 +++--- .../migrations/replay/TestContext.java | 23 ++++ 17 files changed, 245 insertions(+), 102 deletions(-) create mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index c9b99d9b3..e3d5a0071 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -14,7 +14,6 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; @@ -980,7 +979,7 @@ public void pullCaptureFromSourceToAccumulator( if (stopReadingRef.get()) { break; } - this.nextChunkFutureRef.set(trafficChunkStream.readNextTrafficStreamChunk()); + this.nextChunkFutureRef.set(trafficChunkStream.readNextTrafficStreamChunk(null)); List trafficStreams = null; try { trafficStreams = this.nextChunkFutureRef.get().get(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 0eeb13a6c..ff6dd2201 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay.kafka; import com.google.protobuf.InvalidProtocolBufferException; +import io.netty.util.concurrent.DefaultThreadFactory; import lombok.NonNull; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -17,6 +18,8 @@ import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import 
org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -82,25 +85,25 @@ public class KafkaTrafficCaptureSource implements ISimpleTrafficCaptureSource { private final KafkaBehavioralPolicy behavioralPolicy; private final ChannelContextManager channelContextManager; - public KafkaTrafficCaptureSource(Consumer kafkaConsumer, - String topic, Duration keepAliveInterval) { - this(kafkaConsumer, topic, keepAliveInterval, - Clock.systemUTC(), new KafkaBehavioralPolicy()); + public KafkaTrafficCaptureSource(@NonNull IInstrumentationAttributes globalContext, + Consumer kafkaConsumer, String topic, Duration keepAliveInterval) { + this(globalContext, kafkaConsumer, topic, keepAliveInterval, Clock.systemUTC(), new KafkaBehavioralPolicy()); } - public KafkaTrafficCaptureSource(Consumer kafkaConsumer, + public KafkaTrafficCaptureSource(@NonNull IInstrumentationAttributes globalContext, + Consumer kafkaConsumer, @NonNull String topic, Duration keepAliveInterval, Clock clock, @NonNull KafkaBehavioralPolicy behavioralPolicy) { this.channelContextManager = new ChannelContextManager(); - trackingKafkaConsumer = new TrackingKafkaConsumer(kafkaConsumer, topic, keepAliveInterval, clock, + trackingKafkaConsumer = new TrackingKafkaConsumer(globalContext, kafkaConsumer, topic, keepAliveInterval, clock, this::onKeyFinishedCommitting); trafficStreamsRead = new AtomicLong(); this.behavioralPolicy = behavioralPolicy; kafkaConsumer.subscribe(Collections.singleton(topic), trackingKafkaConsumer); - kafkaExecutor = Executors.newSingleThreadExecutor(); + kafkaExecutor = Executors.newSingleThreadExecutor(new DefaultThreadFactory("kafkaConsumerThread")); } private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { @@ -114,7 +117,8 @@ private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { channelContextManager.releaseContextFor((ChannelKeyContext) kafkaCtx.getImmediateEnclosingScope()); } - public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull String brokers, + public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull IInstrumentationAttributes globalContext, + @NonNull String brokers, @NonNull String topic, @NonNull String groupId, boolean enableMSKAuth, @@ -127,7 +131,7 @@ public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull String brokers kafkaProps.putIfAbsent(MAX_POLL_INTERVAL_KEY, DEFAULT_POLL_INTERVAL_MS); var pollPeriod = Duration.ofMillis(Long.valueOf((String)kafkaProps.get(MAX_POLL_INTERVAL_KEY))); var keepAlivePeriod = getKeepAlivePeriodFromPollPeriod(pollPeriod); - return new KafkaTrafficCaptureSource(new KafkaConsumer<>(kafkaProps), + return new KafkaTrafficCaptureSource(globalContext, new KafkaConsumer<>(kafkaProps), topic, keepAlivePeriod, clock, behavioralPolicy); } @@ -173,8 +177,8 @@ public static Properties buildKafkaProperties(@NonNull String brokers, @Override @SneakyThrows - public void touch() { - CompletableFuture.runAsync(trackingKafkaConsumer::touch, kafkaExecutor).get(); + public void touch(IInstrumentationAttributes context) { + CompletableFuture.runAsync(()->trackingKafkaConsumer.touch(context), kafkaExecutor).get(); } /** @@ -189,18 +193,19 @@ public Optional getNextRequiredTouch() { @Override @SuppressWarnings("unchecked") - public CompletableFuture> readNextTrafficStreamChunk() { + public CompletableFuture> + readNextTrafficStreamChunk(IInstrumentationAttributes context) { log.atTrace().setMessage("readNextTrafficStreamChunk()").log(); return CompletableFuture.supplyAsync(() -> { 
log.atTrace().setMessage("async...readNextTrafficStreamChunk()").log(); - return readNextTrafficStreamSynchronously(); + return readNextTrafficStreamSynchronously(context); }, kafkaExecutor); } - public List readNextTrafficStreamSynchronously() { + public List readNextTrafficStreamSynchronously(IInstrumentationAttributes context) { log.atTrace().setMessage("readNextTrafficStreamSynchronously()").log(); try { - return trackingKafkaConsumer.getNextBatchOfRecords((offsetData,kafkaRecord) -> { + return trackingKafkaConsumer.getNextBatchOfRecords(context, (offsetData,kafkaRecord) -> { try { TrafficStream ts = TrafficStream.parseFrom(kafkaRecord.value()); // Ensure we increment trafficStreamsRead even at a higher log level @@ -232,12 +237,13 @@ public List readNextTrafficStreamSynchronously() { } @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(IInstrumentationAttributes context, + ITrafficStreamKey trafficStreamKey) { if (!(trafficStreamKey instanceof TrafficStreamKeyWithKafkaRecordId)) { throw new IllegalArgumentException("Expected key of type "+TrafficStreamKeyWithKafkaRecordId.class+ " but received "+trafficStreamKey+" (of type="+trafficStreamKey.getClass()+")"); } - trackingKafkaConsumer.commitKafkaKey(trafficStreamKey, (TrafficStreamKeyWithKafkaRecordId) trafficStreamKey); + return trackingKafkaConsumer.commitKafkaKey(trafficStreamKey, (TrafficStreamKeyWithKafkaRecordId) trafficStreamKey); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 8a77f26ad..1309d3b09 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -11,6 +11,12 @@ import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.tracing.ISpanGenerator; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.slf4j.event.Level; import java.time.Clock; @@ -22,6 +28,7 @@ import java.util.Map; import java.util.Optional; import java.util.PriorityQueue; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; @@ -37,6 +44,8 @@ */ @Slf4j public class TrackingKafkaConsumer implements ConsumerRebalanceListener { + private static final SimpleMeteringClosure METERING_CLOSURE = + new SimpleMeteringClosure("TrackingKafkaConsumer"); @AllArgsConstructor private static class OrderedKeyHolder implements Comparable { @@ -65,6 +74,14 @@ public int hashCode() { } } + public static class PollScopeContext extends DirectNestedSpanContext { + public PollScopeContext(@NonNull IScopedInstrumentationAttributes enclosingScope, + @NonNull ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + 
setCurrentSpan(spanGenerator); + } + } + /** * The keep-alive should already be set to a fraction of the max poll timeout for * the consumer (done outside of this class). The keep-alive tells this class how @@ -94,7 +111,8 @@ public int hashCode() { private final Duration keepAliveInterval; private final AtomicReference lastTouchTimeRef; private final AtomicInteger consumerConnectionGeneration; - private final AtomicInteger kafkaRecordsLeftToCommit; + private final AtomicInteger kafkaRecordsLeftToCommitEventually; + private final AtomicBoolean kafkaRecordsReadyToCommit; public TrackingKafkaConsumer(Consumer kafkaConsumer, String topic, Duration keepAliveInterval, Clock c, @@ -107,7 +125,8 @@ public TrackingKafkaConsumer(Consumer kafkaConsumer, String topi this.nextSetOfKeysContextsBeingCommitted = new HashMap<>(); this.lastTouchTimeRef = new AtomicReference<>(Instant.EPOCH); consumerConnectionGeneration = new AtomicInteger(); - kafkaRecordsLeftToCommit = new AtomicInteger(); + kafkaRecordsLeftToCommitEventually = new AtomicInteger(); + kafkaRecordsReadyToCommit = new AtomicBoolean(); this.keepAliveInterval = keepAliveInterval; this.onCommitKeyCallback = onCommitKeyCallback; } @@ -122,8 +141,9 @@ public void onPartitionsRevoked(Collection partitions) { nextSetOfKeysContextsBeingCommitted.remove(tp); partitionToOffsetLifecycleTrackerMap.remove(p.partition()); }); - kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() + kafkaRecordsLeftToCommitEventually.set(partitionToOffsetLifecycleTrackerMap.values().stream() .mapToInt(OffsetLifecycleTracker::size).sum()); + kafkaRecordsReadyToCommit.set(!nextSetOfCommitsMap.values().isEmpty()); log.atWarn().setMessage(() -> this + "partitions revoked for " + partitions.stream() .map(p -> p + "").collect(Collectors.joining(","))).log(); } @@ -147,8 +167,8 @@ public void close() { public Optional getNextRequiredTouch() { var lastTouchTime = lastTouchTimeRef.get(); - var r = kafkaRecordsLeftToCommit.get() == 0 ? Optional.empty() - : Optional.of(lastTouchTime.plus(keepAliveInterval)); + var r = kafkaRecordsLeftToCommitEventually.get() == 0 ? Optional.empty() + : Optional.of(kafkaRecordsReadyToCommit.get() ? 
Instant.now() : lastTouchTime.plus(keepAliveInterval)); log.atTrace().setMessage(()->"returning next required touch at " + r.map(t->""+t).orElse("N/A") + " from a lastTouchTime of "+lastTouchTime).log(); return r; @@ -215,8 +235,9 @@ private Collection getActivePartitions() { } public Stream - getNextBatchOfRecords(BiFunction,T> builder) { - var records = safePollWithSwallowedRuntimeExceptions(); + getNextBatchOfRecords(IScopedInstrumentationAttributes context, + BiFunction, T> builder) { + var records = safePollWithSwallowedRuntimeExceptions(context); safeCommit(); return applyBuilder(builder, records); } @@ -229,19 +250,24 @@ private Stream applyBuilder(BiFunction"records in flight="+kafkaRecordsLeftToCommit.get()).log(); + kafkaRecordsLeftToCommitEventually.incrementAndGet(); + log.atTrace().setMessage(()->"records in flight="+ kafkaRecordsLeftToCommitEventually.get()).log(); return builder.apply(offsetDetails, kafkaRecord); }); } - private ConsumerRecords safePollWithSwallowedRuntimeExceptions() { + private ConsumerRecords + safePollWithSwallowedRuntimeExceptions(IScopedInstrumentationAttributes context) { try { lastTouchTimeRef.set(clock.instant()); - var records = kafkaConsumer.poll(keepAliveInterval.dividedBy(POLL_TIMEOUT_KEEP_ALIVE_DIVISOR)); + ConsumerRecords records; + try (var pollContext = new PollScopeContext(context, + METERING_CLOSURE.makeSpanContinuation("kafkaPoll"))) { + records = kafkaConsumer.poll(keepAliveInterval.dividedBy(POLL_TIMEOUT_KEEP_ALIVE_DIVISOR)); + } log.atLevel(records.isEmpty()? Level.TRACE:Level.INFO) .setMessage(()->"Kafka consumer poll has fetched "+records.count() + " records. " + - "Records in flight=" + kafkaRecordsLeftToCommit.get()).log(); + "Records in flight=" + kafkaRecordsLeftToCommitEventually.get()).log(); log.atTrace().setMessage(()->"All positions: {"+kafkaConsumer.assignment().stream() .map(tp->tp+": "+kafkaConsumer.position(tp)).collect(Collectors.joining(",")) + "}").log(); log.atTrace().setMessage(()->"All previously COMMITTED positions: {"+kafkaConsumer.assignment().stream() @@ -254,7 +280,7 @@ private ConsumerRecords safePollWithSwallowedRuntimeExceptions() } } - void commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) { + ITrafficCaptureSource.CommitResult commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) { OffsetLifecycleTracker tracker; synchronized (commitDataLock) { tracker = partitionToOffsetLifecycleTrackerMap.get(kafkaTsk.getPartition()); @@ -266,7 +292,7 @@ void commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) + "). Dropping this commit request since the record would " + "have been handled again by a current consumer within this process or another. 
Full key=" + kafkaTsk).log(); - return; + return ITrafficCaptureSource.CommitResult.Ignored; } var p = kafkaTsk.getPartition(); @@ -275,17 +301,19 @@ void commitKafkaKey(ITrafficStreamKey streamKey, KafkaCommitOffsetData kafkaTsk) var k = new TopicPartition(topic, p); newHeadValue = tracker.removeAndReturnNewHead(kafkaTsk.getOffset()); - newHeadValue.ifPresentOrElse(o -> { + return newHeadValue.map(o -> { var v = new OffsetAndMetadata(o); log.atDebug().setMessage(()->"Adding new commit " + k + "->" + v + " to map").log(); synchronized (commitDataLock) { addKeyContextForEventualCommit(streamKey, kafkaTsk, k); nextSetOfCommitsMap.put(k, v); } - }, () -> { + return ITrafficCaptureSource.CommitResult.AfterNextRead; + }).orElseGet(() -> { synchronized (commitDataLock) { addKeyContextForEventualCommit(streamKey, kafkaTsk, k); } + return ITrafficCaptureSource.CommitResult.BlockedByOtherCommits; }); } @@ -312,10 +340,10 @@ private void safeCommit() { // other call other than commitKafkaKey(). Since commitKafkaKey() doesn't alter // partitionToOffsetLifecycleTrackerMap, these lines can be outside of the commitDataLock mutex log.trace("partitionToOffsetLifecycleTrackerMap="+partitionToOffsetLifecycleTrackerMap); - kafkaRecordsLeftToCommit.set(partitionToOffsetLifecycleTrackerMap.values().stream() + kafkaRecordsLeftToCommitEventually.set(partitionToOffsetLifecycleTrackerMap.values().stream() .mapToInt(OffsetLifecycleTracker::size).sum()); log.atDebug().setMessage(() -> "Done committing now records in flight=" + - kafkaRecordsLeftToCommit.get()).log(); + kafkaRecordsLeftToCommitEventually.get()).log(); } catch (RuntimeException e) { log.atWarn().setCause(e) .setMessage(() -> "Error while committing. " + diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java index bbb7f611b..532d0b178 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java @@ -9,13 +9,13 @@ import java.time.Instant; -public abstract class AbstractNestedSpanContext implements - IScopedInstrumentationAttributes, IWithStartTime { +public abstract class AbstractNestedSpanContext + implements IScopedInstrumentationAttributes, IWithStartTime, AutoCloseable { final T enclosingScope; @Getter final Instant startTime; @Getter private Span currentSpan; - public AbstractNestedSpanContext(@NonNull T enclosingScope) { + public AbstractNestedSpanContext(T enclosingScope) { this.enclosingScope = enclosingScope; this.startTime = Instant.now(); } @@ -31,8 +31,17 @@ protected void setCurrentSpan(@NonNull ISpanWithParentGenerator spanGenerator) { setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan())); } + protected void setCurrentSpanWithNoParent(@NonNull ISpanWithParentGenerator spanGenerator) { + assert enclosingScope == null; + setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), null)); + } + protected void setCurrentSpan(@NonNull Span s) { assert currentSpan == null : "only expect to set the current span once"; currentSpan = s; } + + public void close() { + endSpan(); + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java 
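Making AbstractNestedSpanContext AutoCloseable, with close() delegating to endSpan(), is what lets call sites such as safePollWithSwallowedRuntimeExceptions wrap a poll in try-with-resources so the span ends even when the poll throws. A self-contained sketch of that pattern using only the plain OpenTelemetry API (the class and method names below are illustrative, not the project's):

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.api.trace.Tracer;

    // A span wrapper that ends itself when its try-with-resources block exits,
    // mirroring the AbstractNestedSpanContext.close() -> endSpan() behavior in the patch.
    final class ClosableSpan implements AutoCloseable {
        private static final Tracer TRACER = GlobalOpenTelemetry.getTracer("sketch");
        private final Span span;
        ClosableSpan(String name) { this.span = TRACER.spanBuilder(name).startSpan(); }
        Span span() { return span; }
        @Override public void close() { span.end(); }
    }

    class PollSketch {
        void pollOnce() {
            try (var pollSpan = new ClosableSpan("kafkaPoll")) {
                // kafkaConsumer.poll(...) would run here; the "kafkaPoll" span is ended
                // whether the poll returns normally or throws.
            }
        }
    }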
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java index 66eb863f1..a2c8db819 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java @@ -7,7 +7,7 @@ public class DirectNestedSpanContext extends AbstractNestedSpanContext implements IWithTypedEnclosingScope { - public DirectNestedSpanContext(@NonNull T enclosingScope) { + public DirectNestedSpanContext(T enclosingScope) { super(enclosingScope); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index ddf838e27..28c7f1895 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -1,9 +1,17 @@ package org.opensearch.migrations.replay.traffic.source; import com.google.protobuf.Timestamp; +import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.tracing.ISpanGenerator; +import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.slf4j.event.Level; @@ -33,6 +41,8 @@ */ @Slf4j public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlowController { + private static final SimpleMeteringClosure METERING_CLOSURE = + new SimpleMeteringClosure("BlockingTrafficSource"); private final ISimpleTrafficCaptureSource underlyingSource; private final AtomicReference lastTimestampSecondsRef; @@ -43,9 +53,38 @@ public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlo private final Semaphore readGate; private final Duration bufferTimeWindow; + public static class ReadChunkContext extends DirectNestedSpanContext { + @Getter IScopedInstrumentationAttributes enclosingScope; - public BlockingTrafficSource(ISimpleTrafficCaptureSource underlying, - Duration bufferTimeWindow) { + public ReadChunkContext(IScopedInstrumentationAttributes enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpanWithNoParent(spanGenerator); + } + + @Override + public void close() { + endSpan(); + } + } + + public static class BackPressureBlockContext extends DirectNestedSpanContext { + public BackPressureBlockContext(@NonNull ReadChunkContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class WaitForNextSignal extends DirectNestedSpanContext { + public WaitForNextSignal(@NonNull BackPressureBlockContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public 
BlockingTrafficSource(ISimpleTrafficCaptureSource underlying, Duration bufferTimeWindow) { this.underlyingSource = underlying; this.stopReadingAtRef = new AtomicReference<>(Instant.EPOCH); this.lastTimestampSecondsRef = new AtomicReference<>(Instant.EPOCH); @@ -81,6 +120,10 @@ public Duration getBufferTimeWindow() { return bufferTimeWindow; } + public CompletableFuture> readNextTrafficStreamChunk() { + return readNextTrafficStreamChunk(null); + } + /** * Reads the next chunk that is available before the current stopReading barrier. However, * that barrier isn't meant to be a tight barrier with immediate effect. @@ -89,56 +132,66 @@ public Duration getBufferTimeWindow() { */ @Override public CompletableFuture> - readNextTrafficStreamChunk() { + readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { + var readContext = new ReadChunkContext(context, + METERING_CLOSURE.makeSpanContinuation("readNextTrafficStreamChunk")); log.info("BlockingTrafficSource::readNext"); var trafficStreamListFuture = CompletableFuture - .supplyAsync(this::blockIfNeeded, task -> new Thread(task).start()) - .thenCompose(v->{ + .supplyAsync(() -> blockIfNeeded(readContext), task -> new Thread(task).start()) + .thenCompose(v -> { log.info("BlockingTrafficSource::composing"); - return underlyingSource.readNextTrafficStreamChunk(); - }); - return trafficStreamListFuture.whenComplete((v,t)->{ + return underlyingSource.readNextTrafficStreamChunk(readContext); + }) + .whenComplete((v,t)->readContext.endSpan()); + return trafficStreamListFuture.whenComplete((v, t) -> { if (t != null) { return; } - var maxLocallyObservedTimestamp = v.stream().flatMap(tswk->tswk.getStream().getSubStreamList().stream()) - .map(tso->tso.getTs()) + var maxLocallyObservedTimestamp = v.stream() + .flatMap(tswk -> tswk.getStream().getSubStreamList().stream()) + .map(tso -> tso.getTs()) .max(Comparator.comparingLong(Timestamp::getSeconds) .thenComparingInt(Timestamp::getNanos)) .map(TrafficStreamUtils::instantFromProtoTimestamp) .orElse(Instant.EPOCH); Utils.setIfLater(lastTimestampSecondsRef, maxLocallyObservedTimestamp); - log.atTrace().setMessage(()->"end of readNextTrafficStreamChunk trigger...lastTimestampSecondsRef=" - +lastTimestampSecondsRef.get()).log(); + log.atTrace().setMessage(() -> "end of readNextTrafficStreamChunk trigger...lastTimestampSecondsRef=" + + lastTimestampSecondsRef.get()).log(); }); } - private Void blockIfNeeded() { + private Void blockIfNeeded(ReadChunkContext readContext) { if (stopReadingAtRef.get().equals(Instant.EPOCH)) { return null; } - log.atInfo().setMessage(()->"stopReadingAtRef="+stopReadingAtRef+ - " lastTimestampSecondsRef="+lastTimestampSecondsRef).log(); + log.atInfo().setMessage(() -> "stopReadingAtRef=" + stopReadingAtRef + + " lastTimestampSecondsRef=" + lastTimestampSecondsRef).log(); + BackPressureBlockContext blockContext = null; while (stopReadingAtRef.get().isBefore(lastTimestampSecondsRef.get())) { - try { + if (blockContext == null) { + blockContext = new BackPressureBlockContext(readContext, + METERING_CLOSURE.makeSpanContinuation("backPressureBlock")); + } + try (var waitContext = new WaitForNextSignal(blockContext, + METERING_CLOSURE.makeSpanContinuation("waitForNextBackPressureCheck"))) { log.atInfo().setMessage("blocking until signaled to read the next chunk last={} stop={}") .addArgument(lastTimestampSecondsRef.get()) .addArgument(stopReadingAtRef.get()) .log(); var nextTouchOp = underlyingSource.getNextRequiredTouch(); if (nextTouchOp.isEmpty()) { - log.trace("acquring 
readGate semaphore (w/out timeout)"); + log.trace("acquiring readGate semaphore (w/out timeout)"); readGate.acquire(); } else { var nextInstant = nextTouchOp.get(); final var nowTime = Instant.now(); var waitIntervalMs = Duration.between(nowTime, nextInstant).toMillis(); - log.atDebug().setMessage(()->"Next touch at " + nextInstant + - " ... in " + waitIntervalMs + "ms (now="+nowTime+")").log(); + log.atDebug().setMessage(() -> "Next touch at " + nextInstant + + " ... in " + waitIntervalMs + "ms (now=" + nowTime + ")").log(); if (waitIntervalMs <= 0) { underlyingSource.touch(); } else { // if this doesn't succeed, we'll loop around & likely do a touch, then loop around again. // if it DOES succeed, we'll loop around and make sure that there's not another reason to stop - log.atTrace().setMessage(()->"acquring readGate semaphore with timeout="+waitIntervalMs).log(); + log.atTrace().setMessage(() -> "acquring readGate semaphore with timeout=" + waitIntervalMs).log(); readGate.tryAcquire(waitIntervalMs, TimeUnit.MILLISECONDS); } } @@ -148,12 +201,20 @@ private Void blockIfNeeded() { break; } } + if (blockContext != null) { + blockContext.endSpan(); + } return null; } @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { - underlyingSource.commitTrafficStream(trafficStreamKey); + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { + var commitResult = underlyingSource.commitTrafficStream(trafficStreamKey); + if (commitResult == CommitResult.AfterNextRead) { + readGate.drainPermits(); + readGate.release(); + } + return commitResult; } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java index 625bde671..97e8e825e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java @@ -1,10 +1,10 @@ package org.opensearch.migrations.replay.traffic.source; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.Closeable; import java.io.IOException; -import java.time.Duration; import java.time.Instant; import java.util.List; import java.util.Optional; @@ -12,9 +12,16 @@ public interface ITrafficCaptureSource extends Closeable { - CompletableFuture> readNextTrafficStreamChunk(); + enum CommitResult { + Immediate, AfterNextRead, BlockedByOtherCommits, Ignored + } - default void commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException {} + CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context); + + /** + * Returns true if the committed results are immediate + */ + CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException; default void close() throws IOException {} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 5daf74a5b..d5cab1132 100644 --- 
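Because commitTrafficStream() now reports how the commit was handled rather than returning void, a caller can distinguish an offset that is already safe from one that only becomes committable after another poll; BlockingTrafficSource uses exactly that distinction to re-release its readGate. A rough sketch of such a caller, with the semaphore name chosen arbitrarily and the enum redeclared locally so the fragment stands alone (the real enum lives on ITrafficCaptureSource):

    import java.util.concurrent.Semaphore;

    class CommitResultHandlingSketch {
        // Local mirror of ITrafficCaptureSource.CommitResult, redeclared only so this sketch compiles on its own.
        enum CommitResult { Immediate, AfterNextRead, BlockedByOtherCommits, Ignored }

        private final Semaphore readGate = new Semaphore(0);

        void onCommit(CommitResult result) {
            switch (result) {
                case Immediate:
                    break; // offset already durable; nothing further to do
                case AfterNextRead:
                    // wake the reader so the next poll (and the deferred Kafka commit) happens promptly
                    readGate.drainPermits();
                    readGate.release();
                    break;
                case BlockedByOtherCommits:
                    break; // an earlier, still-open offset on the same partition must be committed first
                case Ignored:
                    break; // stale consumer generation; the record will be re-read by a newer consumer
            }
        }
    }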
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -56,7 +57,7 @@ public IChannelKeyContext getChannelKeyContext() { * * @return */ - public CompletableFuture> readNextTrafficStreamChunk() { + public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { return CompletableFuture.supplyAsync(() -> { var builder = TrafficStream.newBuilder(); try { @@ -82,8 +83,9 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { // do nothing - this datasource isn't transactional + return CommitResult.Immediate; } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 60eb2c163..78978e3f9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -77,7 +78,7 @@ private static class TestTrafficCaptureSource implements ISimpleTrafficCaptureSo } @Override - public CompletableFuture> readNextTrafficStreamChunk() { + public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { log.atTrace().setMessage(()->"Test.readNextTrafficStreamChunk.counter="+counter).log(); var i = counter.getAndIncrement(); if (i >= nStreamsToCreate) { @@ -105,8 +106,9 @@ public CompletableFuture> readNextTrafficStreamChunk public void close() throws IOException {} @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { // do nothing + return CommitResult.Immediate; } } } \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java index 25cb216ce..308fbfc3c 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java @@ -4,14 +4,12 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; -import org.opensearch.migrations.trafficcapture.protos.TrafficStream; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.EOFException; import java.io.FileInputStream; import java.io.IOException; -import java.util.HashMap; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -33,16 +31,17 @@ private static InputStreamOfTraffic getTrafficSource(String filename) throws IOE } @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { // do nothing + return CommitResult.Immediate; } @Override - public CompletableFuture> readNextTrafficStreamChunk() { + public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { if (numberOfTrafficStreamsToRead.get() <= 0) { return CompletableFuture.failedFuture(new EOFException()); } - return trafficSource.readNextTrafficStreamChunk() + return trafficSource.readNextTrafficStreamChunk(context) .thenApply(ltswk -> { var transformedTrafficStream = ltswk.stream().map(this::modifyTrafficStream).collect(Collectors.toList()); var oldValue = numberOfTrafficStreamsToRead.get(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index efbe82816..9d50a370c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -20,6 +20,7 @@ import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; @@ -230,7 +231,7 @@ public ArrayCursorTrafficCaptureSource(ArrayCursorTrafficSourceFactory arrayCurs } @Override - public CompletableFuture> readNextTrafficStreamChunk() { + public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { var idx = readCursor.getAndIncrement(); log.info("reading chunk from index="+idx); if (arrayCursorTrafficSourceFactory.trafficStreamsList.size() <= idx) { @@ -246,7 +247,7 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { synchronized (pQueue) { // figure out if 
I need to do something more efficient later log.info("Commit called for "+trafficStreamKey+" with pQueue.size="+pQueue.size()); var incomingCursor = ((TrafficStreamCursorKey)trafficStreamKey).arrayIndex; @@ -265,6 +266,7 @@ public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) { log.info("Commit called for "+trafficStreamKey+", but topCursor="+topCursor); } } + return CommitResult.Immediate; } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index 3859915c3..6a163287b 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -165,7 +165,7 @@ Producer buildKafkaProducer() { try { for (int i = 0; i < recordCount; ++i) { List chunks = null; - chunks = originalTrafficSource.readNextTrafficStreamChunk().get(); + chunks = originalTrafficSource.readNextTrafficStreamChunk(TestContext.singleton).get(); for (int j = 0; j < chunks.size(); ++j) { KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, chunks.get(j).getStream(), TEST_TOPIC_NAME, "KEY_" + i + "_" + j); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java index 5ef78d922..c0b2a604f 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java @@ -4,11 +4,11 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.EOFException; import java.io.IOException; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; @@ -25,11 +25,11 @@ public SentinelSensingTrafficSource(ISimpleTrafficCaptureSource underlyingSource } @Override - public CompletableFuture> readNextTrafficStreamChunk() { + public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { if (stopReadingRef.get()) { return CompletableFuture.failedFuture(new EOFException()); } - return underlyingSource.readNextTrafficStreamChunk().thenApply(v->{ + return underlyingSource.readNextTrafficStreamChunk(context).thenApply(v->{ if (v != null) { return v.stream().takeWhile(ts->{ var isSentinel = ts.getStream().getConnectionId().equals(SENTINEL_CONNECTION_ID); @@ -45,8 +45,8 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public void commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { - underlyingSource.commitTrafficStream(trafficStreamKey); + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { + return underlyingSource.commitTrafficStream(trafficStreamKey); } @Override diff --git 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 684e870d9..4c79a62c5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -9,7 +9,6 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; @@ -118,7 +117,8 @@ public void testDelimitedDeserializer() throws Exception { var allMatch = new AtomicBoolean(true); try (var trafficProducer = new InputStreamOfTraffic(bais)) { while (true) { - trafficProducer.readNextTrafficStreamChunk().get().stream().forEach(ts->{ + trafficProducer.readNextTrafficStreamChunk(TestContext.singleton).get().stream() + .forEach(ts->{ var i = counter.incrementAndGet(); var expectedStream = makeTrafficStream(timestamp.plus(i - 1, ChronoUnit.SECONDS), i); var isEqual = ts.getStream().equals(expectedStream); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index 40e15c712..555f146ea 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -5,6 +5,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.replay.TestContext; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; @@ -59,7 +60,7 @@ public void testTrafficCaptureSource() throws Exception { for (int i=0; i { - var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk().get(1, TimeUnit.SECONDS); + var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(TestContext.singleton) + .get(1, TimeUnit.SECONDS); if (rogueChunk.isEmpty()) { // TimeoutExceptions cannot be thrown by the supplier of the CompletableFuture today, BUT we // could long-poll on the broker for longer than the timeout value supplied in the get() call above diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index 3632106f4..526efda8c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -10,6 +10,7 @@ import 
org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.replay.TestContext; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; @@ -72,13 +73,14 @@ public void testSupplyTrafficFromSource() { var tsCount = new AtomicInteger(); Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { - protobufConsumer.readNextTrafficStreamChunk().get().stream().forEach(streamWithKey->{ - tsCount.incrementAndGet(); - log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); - Assertions.assertInstanceOf(ITrafficStreamWithKey.class, streamWithKey); - Assertions.assertEquals(streamWithKey.getStream().getSubStreamCount(), - substreamCounts.get(foundStreamsCount.getAndIncrement())); - }); + protobufConsumer.readNextTrafficStreamChunk(TestContext.singleton).get().stream() + .forEach(streamWithKey -> { + tsCount.incrementAndGet(); + log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); + Assertions.assertInstanceOf(ITrafficStreamWithKey.class, streamWithKey); + Assertions.assertEquals(streamWithKey.getStream().getSubStreamCount(), + substreamCounts.get(foundStreamsCount.getAndIncrement())); + }); } }); Assertions.assertEquals(foundStreamsCount.get(), numTrafficStreams); @@ -123,13 +125,14 @@ public void testSupplyTrafficWithUnformattedMessages() { var tsCount = new AtomicInteger(); Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { - protobufConsumer.readNextTrafficStreamChunk().get().stream().forEach(streamWithKey->{ - tsCount.incrementAndGet(); - log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); - Assertions.assertInstanceOf(ITrafficStreamWithKey.class, streamWithKey); - Assertions.assertEquals(streamWithKey.getStream().getSubStreamCount(), - substreamCounts.get(foundStreamsCount.getAndIncrement())); - }); + protobufConsumer.readNextTrafficStreamChunk(TestContext.singleton).get().stream() + .forEach(streamWithKey->{ + tsCount.incrementAndGet(); + log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); + Assertions.assertInstanceOf(ITrafficStreamWithKey.class, streamWithKey); + Assertions.assertEquals(streamWithKey.getStream().getSubStreamCount(), + substreamCounts.get(foundStreamsCount.getAndIncrement())); + }); } }); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java new file mode 100644 index 000000000..8a917406c --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java @@ -0,0 +1,23 @@ +package org.opensearch.migrations.replay; + +import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; + +public class TestContext implements IScopedInstrumentationAttributes { + public static final TestContext singleton = new TestContext(); + + @Override + public 
IInstrumentationAttributes getEnclosingScope() { + return null; + } + + @Getter + public Span currentSpan; + public TestContext() { + currentSpan = new SimpleMeteringClosure("test").makeSpanContinuation("testSpan") + .apply(getPopulatedAttributes(), null); + } +} From ae45a6a59dfc9ea21921cd529a64a7483b01dac5 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 20 Dec 2023 11:35:35 -0500 Subject: [PATCH 32/94] Extract an IInstrumentationAttributes interface from IScopedInstrumentationAttributes. This allows for passing root contexts around that have attributes but don't have an associated span. This helps make the code have less assumptions about how it is situated. The change also opens the door to removing all of the static factories for spans and metrics. Those factories can be chained from a top-level context that is passed throughout the callstack, rooted from these new IInstrumentationAttributes classes. That change isn't here, but will probably be completed in the near future & this makes it easier. I'm also in the process of adding contexts and spans to more places (like the high-level traffic source work), which is what precipitated the greater change. Signed-off-by: Greg Schohn --- .../migrations/tracing/EmptyContext.java | 9 +-- .../tracing/IInstrumentationAttributes.java | 33 +++++++++++ .../IScopedInstrumentationAttributes.java | 27 +-------- .../tracing/IWithStartTimeAndAttributes.java | 2 +- .../tracing/SimpleMeteringClosure.java | 16 ++--- .../commoncontexts/IConnectionContext.java | 3 +- .../replay/TrafficCaptureSourceFactory.java | 12 ++-- .../migrations/replay/TrafficReplayer.java | 45 ++++++++------ .../replay/kafka/TrackingKafkaConsumer.java | 59 +++++++++++++------ .../tracing/AbstractNestedSpanContext.java | 13 +++- .../tracing/DirectNestedSpanContext.java | 3 +- .../tracing/IndirectNestedSpanContext.java | 3 +- .../traffic/source/BlockingTrafficSource.java | 21 +++---- .../traffic/source/ITrafficCaptureSource.java | 8 ++- .../traffic/source/InputStreamOfTraffic.java | 6 +- .../replay/BlockingTrafficSourceTest.java | 6 +- .../CompressedFileTrafficCaptureSource.java | 5 +- .../replay/FullTrafficReplayerTest.java | 7 ++- .../KafkaRestartingTrafficReplayerTest.java | 4 +- .../replay/SentinelSensingTrafficSource.java | 8 ++- .../replay/TrafficReplayerRunner.java | 2 +- .../replay/TrafficReplayerTest.java | 6 +- .../KafkaCommitsWorkBetweenLongPolls.java | 9 +-- .../replay/kafka/KafkaKeepAliveTests.java | 10 ++-- ...KafkaTrafficCaptureSourceLongTermTest.java | 4 +- .../kafka/KafkaTrafficCaptureSourceTest.java | 8 +-- 26 files changed, 196 insertions(+), 133 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java index 97cc56cbf..9d335bfe0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java @@ -3,18 +3,13 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; -public class EmptyContext implements IScopedInstrumentationAttributes { +public class EmptyContext implements IInstrumentationAttributes { public static final EmptyContext singleton = new EmptyContext(); private EmptyContext() {} 
@Override - public Span getCurrentSpan() { - throw new IllegalStateException("This class doesn't track spans"); - } - - @Override - public IScopedInstrumentationAttributes getEnclosingScope() { + public IInstrumentationAttributes getEnclosingScope() { return null; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java new file mode 100644 index 000000000..b18cb8104 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -0,0 +1,33 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; + +import java.util.ArrayList; + +public interface IInstrumentationAttributes { + IInstrumentationAttributes getEnclosingScope(); + + default AttributesBuilder fillAttributes(AttributesBuilder builder) { + return builder; + } + + default Attributes getPopulatedAttributes() { + return getPopulatedAttributesBuilder().build(); + } + + default AttributesBuilder getPopulatedAttributesBuilder() { + var currentObj = this; + var stack = new ArrayList(); + var builder = Attributes.builder(); + while (currentObj != null) { + stack.add(currentObj); + currentObj = currentObj.getEnclosingScope(); + } + // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts + for (int i=stack.size()-1; i>=0; --i) { + builder = stack.get(i).fillAttributes(builder); + } + return builder; + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 511cc710a..4d14a44f4 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -3,38 +3,15 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; +import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; -public interface IScopedInstrumentationAttributes { - IScopedInstrumentationAttributes getEnclosingScope(); +public interface IScopedInstrumentationAttributes extends IInstrumentationAttributes { Span getCurrentSpan(); default void endSpan() { getCurrentSpan().end(); } - - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder; - } - - default Attributes getPopulatedAttributes() { - return getPopulatedAttributesBuilder().build(); - } - - default AttributesBuilder getPopulatedAttributesBuilder() { - var currentObj = this; - var stack = new ArrayList(); - var builder = Attributes.builder(); - while (currentObj != null) { - stack.add(currentObj); - currentObj = currentObj.getEnclosingScope(); - } - // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts - for (int i=stack.size()-1; i>=0; --i) { - builder = stack.get(i).fillAttributes(builder); - } - return builder; - } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java 
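One consequence of the getPopulatedAttributesBuilder() walk shown above is that attributes are applied from the outermost enclosing scope inward, so a nested context can override a key its parent also sets. A small illustration, assuming the IInstrumentationAttributes interface exactly as added in this patch (the attribute names are made up for the example):

    import io.opentelemetry.api.common.AttributeKey;
    import io.opentelemetry.api.common.AttributesBuilder;
    import org.opensearch.migrations.tracing.IInstrumentationAttributes;

    class AttributePrecedenceSketch {
        static class Root implements IInstrumentationAttributes {
            @Override public IInstrumentationAttributes getEnclosingScope() { return null; }
            @Override public AttributesBuilder fillAttributes(AttributesBuilder b) {
                return b.put("component", "replayer").put("stage", "root");
            }
        }
        static class Child implements IInstrumentationAttributes {
            private final IInstrumentationAttributes parent = new Root();
            @Override public IInstrumentationAttributes getEnclosingScope() { return parent; }
            @Override public AttributesBuilder fillAttributes(AttributesBuilder b) {
                return b.put("stage", "kafkaPoll"); // overrides the parent's value for "stage"
            }
        }
        public static void main(String[] args) {
            var attrs = new Child().getPopulatedAttributes();
            // prints "replayer / kafkaPoll": "component" survives from Root, "stage" is overridden by Child
            System.out.println(attrs.get(AttributeKey.stringKey("component")) + " / "
                    + attrs.get(AttributeKey.stringKey("stage")));
        }
    }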
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 1c6986815..76f3c04c8 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -1,4 +1,4 @@ package org.opensearch.migrations.tracing; -public interface IWithStartTimeAndAttributes extends IWithStartTime, IScopedInstrumentationAttributes { +public interface IWithStartTimeAndAttributes extends IWithStartTime, IInstrumentationAttributes { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java index d16018af1..0a7f4d8c6 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java @@ -81,11 +81,11 @@ public static void initializeOpenTelemetry(String serviceName, String collectorE //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); } - public void meterIncrementEvent(IScopedInstrumentationAttributes ctx, String eventName) { + public void meterIncrementEvent(IInstrumentationAttributes ctx, String eventName) { meterIncrementEvent(ctx, eventName, 1); } - public void meterIncrementEvent(IScopedInstrumentationAttributes ctx, String eventName, long increment) { + public void meterIncrementEvent(IInstrumentationAttributes ctx, String eventName, long increment) { if (ctx == null) { return; } @@ -95,7 +95,7 @@ public void meterIncrementEvent(IScopedInstrumentationAttributes ctx, String eve .build()); } - public void meterDeltaEvent(IScopedInstrumentationAttributes ctx, String eventName, long delta) { + public void meterDeltaEvent(IInstrumentationAttributes ctx, String eventName, long delta) { if (ctx == null) { return; } @@ -105,23 +105,23 @@ public void meterDeltaEvent(IScopedInstrumentationAttributes ctx, String eventNa .build()); } - public void meterHistogramMillis(T ctx, String eventName) { + public void meterHistogramMillis(T ctx, String eventName) { meterHistogram(ctx, eventName, "ms", Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); } - public void meterHistogramMicros(T ctx, String eventName) { + public void meterHistogramMicros(T ctx, String eventName) { meterHistogram(ctx, eventName, "us", Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); } - public void meterHistogramMillis(IScopedInstrumentationAttributes ctx, String eventName, Duration between) { + public void meterHistogramMillis(IInstrumentationAttributes ctx, String eventName, Duration between) { meterHistogram(ctx, eventName, "ms", between.toMillis()); } - public void meterHistogramMicros(IScopedInstrumentationAttributes ctx, String eventName, Duration between) { + public void meterHistogramMicros(IInstrumentationAttributes ctx, String eventName, Duration between) { meterHistogram(ctx, eventName, "us", between.toNanos()*1000); } - public void meterHistogram(IScopedInstrumentationAttributes ctx, String eventName, String units, long value) { + public void meterHistogram(IInstrumentationAttributes ctx, String eventName, String units, long value) { if (ctx == null) { return; } diff --git 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 74be9003a..845b13f40 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface IConnectionContext extends IScopedInstrumentationAttributes { @@ -12,7 +13,7 @@ public interface IConnectionContext extends IScopedInstrumentationAttributes { String getNodeId(); @Override - default IScopedInstrumentationAttributes getEnclosingScope() { return null; } + default IInstrumentationAttributes getEnclosingScope() { return null; } @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java index cb41231bf..a0880e074 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java @@ -7,6 +7,8 @@ import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.FileInputStream; import java.io.IOException; @@ -19,12 +21,14 @@ public class TrafficCaptureSourceFactory { private TrafficCaptureSourceFactory() {} public static BlockingTrafficSource - createTrafficCaptureSource(TrafficReplayer.Parameters appParams, Duration bufferTimeWindow) throws IOException { - return new BlockingTrafficSource(createUnbufferedTrafficCaptureSource(appParams), bufferTimeWindow); + createTrafficCaptureSource(IInstrumentationAttributes ctx, + TrafficReplayer.Parameters appParams, Duration bufferTimeWindow) throws IOException { + return new BlockingTrafficSource(createUnbufferedTrafficCaptureSource(ctx, appParams), bufferTimeWindow); } public static ISimpleTrafficCaptureSource - createUnbufferedTrafficCaptureSource(TrafficReplayer.Parameters appParams) throws IOException { + createUnbufferedTrafficCaptureSource(IInstrumentationAttributes ctx, + TrafficReplayer.Parameters appParams) throws IOException { boolean isKafkaActive = TrafficReplayer.validateRequiredKafkaParams(appParams.kafkaTrafficBrokers, appParams.kafkaTrafficTopic, appParams.kafkaTrafficGroupId); boolean isInputFileActive = appParams.inputFilename != null; @@ -33,7 +37,7 @@ private TrafficCaptureSourceFactory() {} } if (isKafkaActive) { - return KafkaTrafficCaptureSource.buildKafkaSource( + return KafkaTrafficCaptureSource.buildKafkaSource(ctx, appParams.kafkaTrafficBrokers, appParams.kafkaTrafficTopic, 
appParams.kafkaTrafficGroupId, appParams.kafkaTrafficEnableMSKAuth, appParams.kafkaTrafficPropertyFile, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index e3d5a0071..d104db4cb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -16,6 +16,8 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.EmptyContext; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.transform.IHttpMessage; @@ -87,6 +89,7 @@ public class TrafficReplayer { private final TrafficStreamLimiter liveTrafficStreamLimiter; private final AtomicInteger successfulRequestCount; private final AtomicInteger exceptionRequestCount; + private final IInstrumentationAttributes topLevelContext; private ConcurrentHashMap> requestFutureMap; private ConcurrentHashMap"Done receiving captured stream for " + requestKey + ":" + rrPair.requestData).log(); var resultantCf = requestFutureMap.remove(requestKey) - .map(f -> f.handle((summary,t)->handleCompletedTransaction(requestKey, rrPair, summary, t)), + .map(f -> f.handle((summary,t)->handleCompletedTransaction(ctx, requestKey, rrPair, summary, t)), () -> "TrafficReplayer.runReplayWithIOStreams.progressTracker"); if (!resultantCf.future.isDone()) { log.trace("Adding " + requestKey + " to targetTransactionInProgressMap"); @@ -638,7 +646,8 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, } } - Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, + Void handleCompletedTransaction(IInstrumentationAttributes context, + @NonNull UniqueReplayerRequestKey requestKey, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { try { @@ -647,7 +656,7 @@ Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, // Escalate it up out handling stack and shutdown. 
if (t == null || t instanceof Exception) { packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); - commitTrafficStreams(rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); + commitTrafficStreams(context, rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); return null; } else { log.atError().setCause(t).setMessage(()->"Throwable passed to handle() for " + requestKey + @@ -683,22 +692,24 @@ Void handleCompletedTransaction(@NonNull UniqueReplayerRequestKey requestKey, public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { - commitTrafficStreams(trafficStreamKeysBeingHeld, status); + commitTrafficStreams(ctx, trafficStreamKeysBeingHeld, status); } @SneakyThrows - private void commitTrafficStreams(List trafficStreamKeysBeingHeld, + private void commitTrafficStreams(IInstrumentationAttributes context, + List trafficStreamKeysBeingHeld, RequestResponsePacketPair.ReconstructionStatus status) { - commitTrafficStreams(trafficStreamKeysBeingHeld, + commitTrafficStreams(context, trafficStreamKeysBeingHeld, status != RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY); } @SneakyThrows - private void commitTrafficStreams(List trafficStreamKeysBeingHeld, boolean shouldCommit) { + private void commitTrafficStreams(IInstrumentationAttributes context, + List trafficStreamKeysBeingHeld, boolean shouldCommit) { if (shouldCommit && trafficStreamKeysBeingHeld != null) { for (var tsk : trafficStreamKeysBeingHeld) { tsk.getTrafficStreamsContext().endSpan(); - trafficCaptureSource.commitTrafficStream(tsk); + trafficCaptureSource.commitTrafficStream(context, tsk); } } } @@ -710,13 +721,13 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, int replayEngine.setFirstTimestamp(timestamp); var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); cf.map(f->f.whenComplete((v,t)->{ - commitTrafficStreams(trafficStreamKeysBeingHeld, status); + commitTrafficStreams(ctx, trafficStreamKeysBeingHeld, status); }), ()->"closing the channel in the ReplayEngine"); } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IChannelKeyContext ctx) { - commitTrafficStreams(List.of(tsk), true); + commitTrafficStreams(ctx, List.of(tsk), true); } private TransformedTargetRequestAndResponse diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 1309d3b09..8fc931bb1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -13,6 +13,7 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; @@ -74,14 +75,22 @@ public int hashCode() { } } - public static class PollScopeContext extends 
DirectNestedSpanContext { - public PollScopeContext(@NonNull IScopedInstrumentationAttributes enclosingScope, + public static class PollScopeContext extends DirectNestedSpanContext { + public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope, @NonNull ISpanWithParentGenerator spanGenerator) { super(enclosingScope); setCurrentSpan(spanGenerator); } } + public static class CommitScopeContext extends DirectNestedSpanContext { + public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope, + @NonNull ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + /** * The keep-alive should already be set to a fraction of the max poll timeout for * the consumer (done outside of this class). The keep-alive tells this class how @@ -92,6 +101,8 @@ public PollScopeContext(@NonNull IScopedInstrumentationAttributes enclosingScope * which happens after we poll() (on the same thread, as per Consumer requirements). */ public static final int POLL_TIMEOUT_KEEP_ALIVE_DIVISOR = 4; + + @NonNull private final IInstrumentationAttributes globalContext; private final Consumer kafkaConsumer; final String topic; @@ -114,9 +125,11 @@ public PollScopeContext(@NonNull IScopedInstrumentationAttributes enclosingScope private final AtomicInteger kafkaRecordsLeftToCommitEventually; private final AtomicBoolean kafkaRecordsReadyToCommit; - public TrackingKafkaConsumer(Consumer kafkaConsumer, String topic, + public TrackingKafkaConsumer(@NonNull IInstrumentationAttributes globalContext, + Consumer kafkaConsumer, String topic, Duration keepAliveInterval, Clock c, java.util.function.Consumer onCommitKeyCallback) { + this.globalContext = globalContext; this.kafkaConsumer = kafkaConsumer; this.topic = topic; this.clock = c; @@ -134,7 +147,7 @@ public TrackingKafkaConsumer(Consumer kafkaConsumer, String topi @Override public void onPartitionsRevoked(Collection partitions) { synchronized (commitDataLock) { - safeCommit(); + safeCommit(globalContext); partitions.forEach(p -> { var tp = new TopicPartition(topic, p.partition()); nextSetOfCommitsMap.remove(tp); @@ -174,7 +187,7 @@ public Optional getNextRequiredTouch() { return r; } - public void touch() { + public void touch(IInstrumentationAttributes context) { log.trace("touch() called."); pause(); try { @@ -191,7 +204,7 @@ public void touch() { } finally { resume(); } - safeCommit(); + safeCommit(context); lastTouchTimeRef.set(clock.instant()); } @@ -235,10 +248,11 @@ private Collection getActivePartitions() { } public Stream - getNextBatchOfRecords(IScopedInstrumentationAttributes context, + getNextBatchOfRecords(IInstrumentationAttributes context, BiFunction, T> builder) { + safeCommit(context); var records = safePollWithSwallowedRuntimeExceptions(context); - safeCommit(); + safeCommit(context); return applyBuilder(builder, records); } @@ -257,7 +271,7 @@ private Stream applyBuilder(BiFunction - safePollWithSwallowedRuntimeExceptions(IScopedInstrumentationAttributes context) { + safePollWithSwallowedRuntimeExceptions(IInstrumentationAttributes context) { try { lastTouchTimeRef.set(clock.instant()); ConsumerRecords records; @@ -322,13 +336,20 @@ private void addKeyContextForEventualCommit(ITrafficStreamKey streamKey, KafkaCo .add(new OrderedKeyHolder(kafkaTsk.getOffset(), streamKey)); } - private void safeCommit() { - var nextCommitsMapCopy = new HashMap(); + private void safeCommit(IInstrumentationAttributes incomingContext) { + HashMap nextCommitsMapCopy; + CommitScopeContext context = null; 
synchronized (commitDataLock) { + if (nextSetOfCommitsMap.isEmpty()) { + return; + } + context = new CommitScopeContext(incomingContext, + METERING_CLOSURE.makeSpanContinuation("commit")); + nextCommitsMapCopy = new HashMap<>(); nextCommitsMapCopy.putAll(nextSetOfCommitsMap); } try { - safeCommitStatic(kafkaConsumer, onCommitKeyCallback, nextCommitsMapCopy); + safeCommitStatic(context, kafkaConsumer, nextCommitsMapCopy); synchronized (commitDataLock) { nextCommitsMapCopy.entrySet().stream() .forEach(kvp->callbackUpTo(onCommitKeyCallback, @@ -356,16 +377,18 @@ private void safeCommit() { nextSetOfCommitsMap.entrySet().stream() .map(kvp -> kvp.getKey() + "->" + kvp.getValue()).collect(Collectors.joining(","))) .log(); + } finally { + if (context != null) { + context.close(); + } } } - private static void safeCommitStatic(Consumer kafkaConsumer, - java.util.function.Consumer onCommitKeyCallback, + private static void safeCommitStatic(CommitScopeContext context, Consumer kafkaConsumer, HashMap nextCommitsMap) { - if (!nextCommitsMap.isEmpty()) { - log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); - kafkaConsumer.commitSync(nextCommitsMap); - } + assert !nextCommitsMap.isEmpty(); + log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); + kafkaConsumer.commitSync(nextCommitsMap); } private static void callbackUpTo(java.util.function.Consumer onCommitKeyCallback, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java index 532d0b178..7311c7d9a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java @@ -3,13 +3,14 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; import lombok.NonNull; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Instant; -public abstract class AbstractNestedSpanContext +public abstract class AbstractNestedSpanContext implements IScopedInstrumentationAttributes, IWithStartTime, AutoCloseable { final T enclosingScope; @Getter final Instant startTime; @@ -21,14 +22,20 @@ public AbstractNestedSpanContext(T enclosingScope) { } @Override - public IScopedInstrumentationAttributes getEnclosingScope() { + public IInstrumentationAttributes getEnclosingScope() { return enclosingScope; } public T getImmediateEnclosingScope() { return enclosingScope; } protected void setCurrentSpan(@NonNull ISpanWithParentGenerator spanGenerator) { - setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), enclosingScope.getCurrentSpan())); + // TODO - switch this to use a virtual function? 
+ if (enclosingScope instanceof IScopedInstrumentationAttributes) { + setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), + ((IScopedInstrumentationAttributes) enclosingScope).getCurrentSpan())); + } else { + setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), null)); + } } protected void setCurrentSpanWithNoParent(@NonNull ISpanWithParentGenerator spanGenerator) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java index a2c8db819..40aba068d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/DirectNestedSpanContext.java @@ -1,10 +1,11 @@ package org.opensearch.migrations.replay.tracing; import lombok.NonNull; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; -public class DirectNestedSpanContext +public class DirectNestedSpanContext extends AbstractNestedSpanContext implements IWithTypedEnclosingScope { public DirectNestedSpanContext(T enclosingScope) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java index 1c8664c3b..513242c45 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java @@ -1,9 +1,10 @@ package org.opensearch.migrations.replay.tracing; import lombok.NonNull; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public abstract class IndirectNestedSpanContext +public abstract class IndirectNestedSpanContext extends AbstractNestedSpanContext { public IndirectNestedSpanContext(@NonNull D enclosingScope) { super(enclosingScope); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 28c7f1895..a38e2b019 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -8,6 +8,7 @@ import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; @@ -53,19 +54,12 @@ public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlo private final Semaphore readGate; private 
final Duration bufferTimeWindow; - public static class ReadChunkContext extends DirectNestedSpanContext { - @Getter IScopedInstrumentationAttributes enclosingScope; - - public ReadChunkContext(IScopedInstrumentationAttributes enclosingScope, + public static class ReadChunkContext extends DirectNestedSpanContext { + public ReadChunkContext(IInstrumentationAttributes enclosingScope, ISpanWithParentGenerator spanGenerator) { super(enclosingScope); setCurrentSpanWithNoParent(spanGenerator); } - - @Override - public void close() { - endSpan(); - } } public static class BackPressureBlockContext extends DirectNestedSpanContext { @@ -132,7 +126,7 @@ public CompletableFuture> readNextTrafficStreamChunk */ @Override public CompletableFuture> - readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { + readNextTrafficStreamChunk(IInstrumentationAttributes context) { var readContext = new ReadChunkContext(context, METERING_CLOSURE.makeSpanContinuation("readNextTrafficStreamChunk")); log.info("BlockingTrafficSource::readNext"); @@ -187,7 +181,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { log.atDebug().setMessage(() -> "Next touch at " + nextInstant + " ... in " + waitIntervalMs + "ms (now=" + nowTime + ")").log(); if (waitIntervalMs <= 0) { - underlyingSource.touch(); + underlyingSource.touch(waitContext); } else { // if this doesn't succeed, we'll loop around & likely do a touch, then loop around again. // if it DOES succeed, we'll loop around and make sure that there's not another reason to stop @@ -208,8 +202,9 @@ private Void blockIfNeeded(ReadChunkContext readContext) { } @Override - public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { - var commitResult = underlyingSource.commitTrafficStream(trafficStreamKey); + public CommitResult commitTrafficStream(IInstrumentationAttributes context, + ITrafficStreamKey trafficStreamKey) throws IOException { + var commitResult = underlyingSource.commitTrafficStream(context, trafficStreamKey); if (commitResult == CommitResult.AfterNextRead) { readGate.drainPermits(); readGate.release(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java index 97e8e825e..df28d5aed 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay.traffic.source; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.Closeable; @@ -16,12 +17,13 @@ enum CommitResult { Immediate, AfterNextRead, BlockedByOtherCommits, Ignored } - CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context); + CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context); /** * Returns true if the committed results are immediate */ - CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException; + CommitResult commitTrafficStream(IInstrumentationAttributes context, + ITrafficStreamKey trafficStreamKey) throws IOException; default void 
close() throws IOException {} @@ -29,7 +31,7 @@ default void close() throws IOException {} * Keep-alive call to be used by the BlockingTrafficSource to keep this connection alive if * this is required. */ - default void touch() {} + default void touch(IInstrumentationAttributes context) {} /** * @return The time that the next call to touch() must be completed for this source to stay diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index d5cab1132..bf3cb0c2b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -57,7 +58,8 @@ public IChannelKeyContext getChannelKeyContext() { * * @return */ - public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { + public CompletableFuture> + readNextTrafficStreamChunk(IInstrumentationAttributes context) { return CompletableFuture.supplyAsync(() -> { var builder = TrafficStream.newBuilder(); try { @@ -83,7 +85,7 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(IInstrumentationAttributes ctx, ITrafficStreamKey trafficStreamKey) { // do nothing - this datasource isn't transactional return CommitResult.Immediate; } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 78978e3f9..7273bc296 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; @@ -78,7 +79,8 @@ private static class TestTrafficCaptureSource implements ISimpleTrafficCaptureSo } @Override - public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { + public CompletableFuture> + readNextTrafficStreamChunk(IInstrumentationAttributes context) { 
log.atTrace().setMessage(()->"Test.readNextTrafficStreamChunk.counter="+counter).log(); var i = counter.getAndIncrement(); if (i >= nStreamsToCreate) { @@ -106,7 +108,7 @@ public CompletableFuture> readNextTrafficStreamChunk public void close() throws IOException {} @Override - public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(IInstrumentationAttributes ctx, ITrafficStreamKey trafficStreamKey) { // do nothing return CommitResult.Immediate; } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java index 308fbfc3c..f7ced0dda 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java @@ -4,6 +4,7 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.EOFException; @@ -31,13 +32,13 @@ private static InputStreamOfTraffic getTrafficSource(String filename) throws IOE } @Override - public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(IInstrumentationAttributes ctx, ITrafficStreamKey trafficStreamKey) { // do nothing return CommitResult.Immediate; } @Override - public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { + public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { if (numberOfTrafficStreamsToRead.get() <= 0) { return CompletableFuture.failedFuture(new EOFException()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index 9d50a370c..af24ee152 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -20,6 +20,7 @@ import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; @@ -129,7 +130,7 @@ public void testDoubleRequestWithCloseIsCommittedOnce() throws Throwable { .build(); var trafficSource = new ArrayCursorTrafficCaptureSource(new ArrayCursorTrafficSourceFactory(List.of(trafficStream))); - var tr = new TrafficReplayer(httpServer.localhostEndpoint(), null, + var tr = new TrafficReplayer(TestContext.singleton, httpServer.localhostEndpoint(), null, new 
StaticAuthTransformerFactory("TEST"), null, true, 10, 10*1024); @@ -231,7 +232,7 @@ public ArrayCursorTrafficCaptureSource(ArrayCursorTrafficSourceFactory arrayCurs } @Override - public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes context) { + public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { var idx = readCursor.getAndIncrement(); log.info("reading chunk from index="+idx); if (arrayCursorTrafficSourceFactory.trafficStreamsList.size() <= idx) { @@ -247,7 +248,7 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(IInstrumentationAttributes ctx, ITrafficStreamKey trafficStreamKey) { synchronized (pQueue) { // figure out if I need to do something more efficient later log.info("Commit called for "+trafficStreamKey+" with pQueue.size="+pQueue.size()); var incomingCursor = ((TrafficStreamCursorKey)trafficStreamKey).arrayIndex; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index 6a163287b..0c8d32bdb 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -96,7 +96,7 @@ public void fullTest(int testSize, boolean randomize) throws Throwable { TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(streamAndConsumer.numHttpTransactions, httpServer.localhostEndpoint(), new CounterLimitedReceiverFactory(), () -> new SentinelSensingTrafficSource( - new KafkaTrafficCaptureSource(buildKafkaConsumer(), TEST_TOPIC_NAME, + new KafkaTrafficCaptureSource(TestContext.singleton, buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)))); log.info("done"); } @@ -176,7 +176,7 @@ Producer buildKafkaProducer() { throw Lombok.sneakyThrow(e); } }); - return () -> new KafkaTrafficCaptureSource(kafkaConsumer, TEST_TOPIC_NAME, + return () -> new KafkaTrafficCaptureSource(TestContext.singleton, kafkaConsumer, TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java index c0b2a604f..703d3f196 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java @@ -4,6 +4,7 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.EOFException; @@ -25,7 +26,7 @@ public SentinelSensingTrafficSource(ISimpleTrafficCaptureSource underlyingSource } @Override - public CompletableFuture> readNextTrafficStreamChunk(IScopedInstrumentationAttributes 
context) { + public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { if (stopReadingRef.get()) { return CompletableFuture.failedFuture(new EOFException()); } @@ -45,8 +46,9 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { - return underlyingSource.commitTrafficStream(trafficStreamKey); + public CommitResult commitTrafficStream(IInstrumentationAttributes context, + ITrafficStreamKey trafficStreamKey) throws IOException { + return underlyingSource.commitTrafficStream(context, trafficStreamKey); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index e5ce4b09d..0b8a56c75 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -140,7 +140,7 @@ private static void runTrafficReplayer(Supplier cap URI endpoint, Consumer tupleReceiver) throws Exception { log.info("Starting a new replayer and running it"); - var tr = new TrafficReplayer(endpoint, null, + var tr = new TrafficReplayer(TestContext.singleton, endpoint, null, new StaticAuthTransformerFactory("TEST"), null, true, 10, 10*1024); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 4c79a62c5..46b5f425c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -151,7 +151,8 @@ static byte[] synthesizeTrafficStreamsIntoByteArray(Instant timestamp, int numSt @Test public void testReader() throws Exception { - var tr = new TrafficReplayer(new URI("http://localhost:9200"), null, null, false); + var tr = new TrafficReplayer(TestContext.singleton, + new URI("http://localhost:9200"), null, null, false); List> byteArrays = new ArrayList<>(); CapturedTrafficToHttpTransactionAccumulator trafficAccumulator = new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, @@ -202,7 +203,8 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel @Test public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { - var tr = new TrafficReplayer(new URI("http://localhost:9200"), null, null, false); + var tr = new TrafficReplayer(TestContext.singleton, + new URI("http://localhost:9200"), null, null, false); List> byteArrays = new ArrayList<>(); var remainingAccumulations = new AtomicInteger(); CapturedTrafficToHttpTransactionAccumulator trafficAccumulator = diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index e64108a78..8b2133b7a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -18,6 +18,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.replay.TestContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.testcontainers.containers.KafkaContainer; @@ -82,7 +83,7 @@ private KafkaConsumer buildKafkaConsumer() { @Test @Tag("longTest") public void testThatCommitsAndReadsKeepWorking() throws Exception { - var kafkaSource = new KafkaTrafficCaptureSource(buildKafkaConsumer(), + var kafkaSource = new KafkaTrafficCaptureSource(TestContext.singleton, buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS/3)); var blockingSource = new BlockingTrafficSource(kafkaSource, Duration.ofMinutes(5)); var kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); @@ -102,7 +103,7 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { var ts = chunks.get(0); Thread.sleep(DEFAULT_POLL_INTERVAL_MS*2); log.info("committing "+ts.getKey()); - blockingSource.commitTrafficStream(ts.getKey()); + blockingSource.commitTrafficStream(TestContext.singleton, ts.getKey()); blockingSource.stopReadsPast(getTimeAtPoint(i)); } } catch (Exception e) { @@ -121,10 +122,10 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { } } } - +// // var spans = testSpanExporter.getFinishedSpanItems(); // Assertions.assertFalse(spans.isEmpty(), "No spans were found"); - +// // var metrics = testMetricExporter.getFinishedMetricItems(); // Assertions.assertFalse(metrics.isEmpty(), "No metrics were found"); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java index 6a0dfb129..105ee2b94 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java @@ -9,6 +9,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.replay.TestContext; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.testcontainers.containers.KafkaContainer; @@ -68,7 +69,8 @@ private void setupTestCase() throws Exception { kafkaProperties.put(HEARTBEAT_INTERVAL_MS_KEY, HEARTBEAT_INTERVAL_MS+""); kafkaProperties.put("max.poll.records", 1); var kafkaConsumer = new KafkaConsumer(kafkaProperties); - this.kafkaSource = new KafkaTrafficCaptureSource(kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); + this.kafkaSource = new KafkaTrafficCaptureSource(TestContext.singleton, + kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); this.trafficSource = new BlockingTrafficSource(kafkaSource, Duration.ZERO); this.keysReceived = new ArrayList<>(); @@ -86,7 +88,7 @@ public void testTimeoutsDontOccurForSlowPolls() throws Exception { try { var k = keysReceived.get(0); log.info("Calling commit traffic stream for "+k); - trafficSource.commitTrafficStream(k); + 
trafficSource.commitTrafficStream(TestContext.singleton, k); log.info("finished committing traffic stream"); log.info("Stop reads to infinity"); // this is a way to signal back to the main thread that this thread is done @@ -112,7 +114,7 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti } readNextNStreams(trafficSource, keysReceived, 1, 1); - trafficSource.commitTrafficStream(keysReceived.get(0)); + trafficSource.commitTrafficStream(TestContext.singleton, keysReceived.get(0)); log.info("Called commitTrafficStream but waiting long enough for the client to leave the group. " + "That will make the previous commit a 'zombie-commit' that should easily be dropped."); @@ -135,7 +137,7 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti keysReceived = new ArrayList<>(); log.atInfo().setMessage(()->"re-establish... 3 ..."+renderNextCommitsAsString()).log(); readNextNStreams(trafficSource, keysReceived, 0, 1); - trafficSource.commitTrafficStream(keysReceivedUntilDrop1.get(1)); + trafficSource.commitTrafficStream(TestContext.singleton, keysReceivedUntilDrop1.get(1)); log.atInfo().setMessage(()->"re-establish... 4 ..."+renderNextCommitsAsString()).log(); readNextNStreams(trafficSource, keysReceived, 1, 1); log.atInfo().setMessage(()->"5 ..."+renderNextCommitsAsString()).log(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index 555f146ea..209bb91af 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -42,8 +42,8 @@ public void testTrafficCaptureSource() throws Exception { final long MAX_POLL_MS = 10000; kafkaConsumerProps.setProperty(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY, MAX_POLL_MS+""); var kafkaConsumer = new KafkaConsumer(kafkaConsumerProps); - var kafkaTrafficCaptureSource = new KafkaTrafficCaptureSource(kafkaConsumer, testTopicName, - Duration.ofMillis(MAX_POLL_MS)); + var kafkaTrafficCaptureSource = new KafkaTrafficCaptureSource(TestContext.singleton, + kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_MS)); var kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); var sendCompleteCount = new AtomicInteger(0); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index 526efda8c..c14b3085a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -53,8 +53,8 @@ public void testRecordToString() { public void testSupplyTrafficFromSource() { int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); - KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(mockConsumer, TEST_TOPIC_NAME, - Duration.ofHours(1)); + KafkaTrafficCaptureSource protobufConsumer = new 
KafkaTrafficCaptureSource(TestContext.singleton, + mockConsumer, TEST_TOPIC_NAME, Duration.ofHours(1)); initializeMockConsumerTopic(mockConsumer); List substreamCounts = new ArrayList<>(); @@ -95,8 +95,8 @@ public void testSupplyTrafficFromSource() { public void testSupplyTrafficWithUnformattedMessages() { int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); - KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(mockConsumer, TEST_TOPIC_NAME, - Duration.ofHours(1)); + KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.singleton, + mockConsumer, TEST_TOPIC_NAME, Duration.ofHours(1)); initializeMockConsumerTopic(mockConsumer); List substreamCounts = new ArrayList<>(); From 195d0badcd7f87dfd9925b1904c3f523853817d7 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 21 Dec 2023 11:30:47 -0500 Subject: [PATCH 33/94] Checkpoint/WIP - More spans across the board, specifically through the target transaction. Unfortunately, the TrafficReplayer fails to load because of a race condition around statically initializing the Otel SDK. However, all unit tests are working, so this is a checkpoint release before I remove the reliance on static otel initialization and move toward it being done via contexts. Signed-off-by: Greg Schohn --- ...edTrafficToHttpTransactionAccumulator.java | 7 +- .../replay/RequestSenderOrchestrator.java | 59 +++++++++++------ .../migrations/replay/TrafficReplayer.java | 12 +++- .../NettyPacketToHttpConsumer.java | 66 +++++++++++++------ .../replay/kafka/TrackingKafkaConsumer.java | 57 +++++++++++----- .../replay/netty/BacksideSnifferHandler.java | 9 ++- .../migrations/replay/tracing/Contexts.java | 40 ++++++++--- .../migrations/replay/tracing/IContexts.java | 12 +++- .../traffic/source/BlockingTrafficSource.java | 15 +++-- 9 files changed, 195 insertions(+), 82 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index e89db0c3c..be78a9d83 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -111,9 +111,8 @@ public void onRequestReceived(IContexts.IRequestAccumulationContext requestCtx, public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @NonNull RequestResponsePacketPair rrpp) { - var responseCtx = rrpp.getResponseContext(); - responseCtx.endSpan(); - underlying.onFullDataReceived(key, responseCtx.getLogicalEnclosingScope(), rrpp); + rrpp.getResponseContext().endSpan(); + underlying.onFullDataReceived(key, rrpp.getHttpTransactionContext(), rrpp); } public void onConnectionClose(@NonNull Accumulation accum, @@ -419,7 +418,7 @@ private boolean handleEndOfRequest(Accumulation accumulation) { private void handleEndOfResponse(Accumulation accumulation, RequestResponsePacketPair.ReconstructionStatus status) { assert accumulation.state == Accumulation.State.ACCUMULATING_WRITES; var rrPair = accumulation.getRrPair(); - var requestKey = rrPair.getResponseContext().getLogicalEnclosingScope().getReplayerRequestKey(); + var requestKey = rrPair.getHttpTransactionContext().getReplayerRequestKey(); 
metricsLogger.atSuccess(MetricsEvent.ACCUMULATED_FULL_CAPTURED_SOURCE_RESPONSE) .setAttribute(MetricsAttributeKey.REQUEST_ID, requestKey.toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index deb9ecc89..6d088e99d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -15,6 +15,7 @@ import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; +import org.opensearch.migrations.tracing.SimpleMeteringClosure; import java.time.Duration; import java.time.Instant; @@ -45,20 +46,24 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); log.atDebug().setMessage(()->"Scheduling work for "+ctx.getConnectionId()+" at time "+timestamp).log(); + var scheduledContext = new Contexts.ScheduledContext(ctx, + new SimpleMeteringClosure("RSO").makeSpanContinuation("scheduled")); // this method doesn't use the scheduling that scheduleRequest and scheduleClose use because // doing work associated with a connection is considered to be preprocessing work independent // of the underlying network connection itself, so it's fair to be able to do this without // first needing to wait for a connection to succeed. In fact, making them more independent // means that the work item being enqueued is less likely to cause a connection timeout. 
- connectionSession.eventLoop.schedule(()-> - task.get().map(f->f.whenComplete((v,t) -> { - if (t!=null) { - finalTunneledResponse.future.completeExceptionally(t); - } else { - finalTunneledResponse.future.complete(v); - } - }), - ()->""), + connectionSession.eventLoop.schedule(()-> { + scheduledContext.close(); + return task.get().map(f -> f.whenComplete((v, t) -> { + if (t != null) { + finalTunneledResponse.future.completeExceptionally(t); + } else { + finalTunneledResponse.future.complete(v); + } + }), + () -> ""); + }, getDelayFromNowMs(timestamp), TimeUnit.MILLISECONDS); return finalTunneledResponse; } @@ -169,9 +174,11 @@ private void scheduleOnConnectionReplaySession(IChannelKeyContext ctx, int c eventLoop.schedule(task.runnable, getDelayFromNowMs(atTime), TimeUnit.MILLISECONDS); scheduledFuture.addListener(f->{ if (!f.isSuccess()) { - log.atError().setCause(f.cause()).setMessage(()->"Error scheduling task for " + ctx).log(); + log.atError().setCause(f.cause()).setMessage(()->"Error running the scheduled task: " + ctx + + " interaction: " + channelInteraction).log(); } else { - log.atInfo().setMessage(()->"scheduled future has finished for "+channelInteraction).log(); + log.atInfo().setMessage(()->"scheduled task has finished for " + ctx + " interaction: " + + channelInteraction).log(); } }); } else { @@ -194,7 +201,7 @@ private void scheduleOnConnectionReplaySession(IChannelKeyContext ctx, int c var sf = eventLoop.schedule(runnable, getDelayFromNowMs(kvp.getKey()), TimeUnit.MILLISECONDS); sf.addListener(sfp->{ if (!sfp.isSuccess()) { - log.atWarn().setCause(sfp.cause()).setMessage(()->"Scheduled future was not successful for " + + log.atWarn().setCause(sfp.cause()).setMessage(()->"Scheduled future did not successfully run " + channelInteraction).log(); } }); @@ -208,10 +215,18 @@ private void scheduleSendOnConnectionReplaySession(IContexts.IReplayerHttpTransa Instant start, Duration interval, Stream packets) { var eventLoop = channelFutureAndRequestSchedule.eventLoop; var packetReceiverRef = new AtomicReference(); - Runnable packetSender = () -> sendNextPartAndContinue(() -> - getPacketReceiver(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), - packetReceiverRef), - eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); + Runnable packetSender = () -> { + try (var targetContext = new Contexts.TargetRequestContext(ctx, + new SimpleMeteringClosure("RSO").makeSpanContinuation("targetTransaction")); + var requestContext = new Contexts.RequestSendingContext(targetContext, + new SimpleMeteringClosure("RSO").makeSpanContinuation("requestSending"))) { + sendNextPartAndContinue(() -> + memoizePacketConsumer(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), + packetReceiverRef), + eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture, + targetContext, requestContext); + } + }; scheduleOnConnectionReplaySession(ctx.getLogicalEnclosingScope(), ctx.getReplayerRequestKey().getSourceRequestIndex(), channelFutureAndRequestSchedule, responseFuture, start, @@ -242,18 +257,21 @@ private long getDelayFromNowMs(Instant to) { } private static NettyPacketToHttpConsumer - getPacketReceiver(IContexts.IReplayerHttpTransactionContext httpTransactionContext, ChannelFuture channelFuture, - AtomicReference packetReceiver) { + memoizePacketConsumer(IContexts.IReplayerHttpTransactionContext httpTransactionContext, ChannelFuture channelFuture, + AtomicReference packetReceiver) { if (packetReceiver.get() == null) { 
packetReceiver.set(new NettyPacketToHttpConsumer(channelFuture, httpTransactionContext)); } return packetReceiver.get(); } + // TODO - rewrite this - the recursion (at least as it is) is terribly confusing private void sendNextPartAndContinue(Supplier packetHandlerSupplier, EventLoop eventLoop, Iterator iterator, Instant start, Duration interval, AtomicInteger counter, - StringTrackableCompletableFuture responseFuture) { + StringTrackableCompletableFuture responseFuture, + Contexts.TargetRequestContext targetContext, + Contexts.RequestSendingContext requestContext) { log.atTrace().setMessage(()->"sendNextPartAndContinue: counter=" + counter.get()).log(); var packetReceiver = packetHandlerSupplier.get(); assert iterator.hasNext() : "Should not have called this with no items to send"; @@ -262,12 +280,13 @@ private void sendNextPartAndContinue(Supplier packetH if (iterator.hasNext()) { counter.incrementAndGet(); Runnable packetSender = () -> sendNextPartAndContinue(packetHandlerSupplier, eventLoop, - iterator, start, interval, counter, responseFuture); + iterator, start, interval, counter, responseFuture, targetContext, requestContext); var delayMs = Duration.between(now(), start.plus(interval.multipliedBy(counter.get()))).toMillis(); eventLoop.schedule(packetSender, Math.min(0, delayMs), TimeUnit.MILLISECONDS); } else { packetReceiver.finalizeRequest().handle((v,t)-> { + targetContext.close(); if (t != null) { responseFuture.future.completeExceptionally(t); } else { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index d104db4cb..bf805cdd8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -14,6 +14,7 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.tracing.EmptyContext; @@ -76,6 +77,8 @@ public class TrafficReplayer { private static final MetricsLogger TUPLE_METRICS_LOGGER = new MetricsLogger("SourceTargetCaptureTuple"); + private static final SimpleMeteringClosure METERING_CLOSURE = + new SimpleMeteringClosure("TrafficReplayer"); public static final String SIGV_4_AUTH_HEADER_SERVICE_REGION_ARG = "--sigv4-auth-header-service-region"; public static final String AUTH_HEADER_VALUE_ARG = "--auth-header-value"; @@ -650,12 +653,16 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, @NonNull UniqueReplayerRequestKey requestKey, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { + var httpContext = rrPair.getHttpTransactionContext(); try { // if this comes in with a serious Throwable (not an Exception), don't bother // packaging it up and calling the callback. // Escalate it up out handling stack and shutdown. 
if (t == null || t instanceof Exception) { - packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); + try (var tupleHandlingContext = new Contexts.TupleHandlingContext(httpContext, + METERING_CLOSURE.makeSpanContinuation("tupleHandling"))) { + packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); + } commitTrafficStreams(context, rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); return null; } else { @@ -680,7 +687,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, .log(); throw e; } finally { - rrPair.getHttpTransactionContext().endSpan(); + httpContext.endSpan(); requestToFinalWorkFuturesMap.remove(requestKey); log.trace("removed rrPair.requestData to " + "targetTransactionInProgressMap for " + @@ -893,7 +900,6 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture> packetsSupplier) { - // TODO - add context chaining try { var transformationCompleteFuture = replayEngine.scheduleTransformationWork(ctx, start, ()-> transformAllData(inputRequestTransformerFactory.create(requestKey, ctx), packetsSupplier)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 5d68e0326..4fb774691 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -25,6 +25,8 @@ import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; @@ -58,7 +60,8 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - IContexts.IReplayerHttpTransactionContext tracingContext; + IContexts.ITargetRequestContext parentContext; + IScopedInstrumentationAttributes currentRequestContext; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, Contexts.HttpTransactionContext httpTransactionContext) { @@ -67,7 +70,10 @@ public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri } public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IContexts.IReplayerHttpTransactionContext ctx) { - this.tracingContext = ctx; + this.parentContext = new Contexts.TargetRequestContext(ctx, + METERING_CLOSURE.makeSpanContinuation("targetTransaction")); + this.currentRequestContext = new Contexts.RequestSendingContext(this.parentContext, + METERING_CLOSURE.makeSpanContinuation("sendingRequest")); responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), @@ -156,7 +162,12 @@ private void activateChannelForThisConsumer() { } var pipeline = 
channel.pipeline(); addLoggingHandler(pipeline, "B"); - pipeline.addLast(new BacksideSnifferHandler(responseBuilder)); + pipeline.addLast(new BacksideSnifferHandler(responseBuilder, ()->{ + this.currentRequestContext.close(); + this.currentRequestContext = new Contexts.ReceivingHttpResponseContext(this.parentContext, + METERING_CLOSURE.makeSpanContinuation("receivingRequest")); + + })); addLoggingHandler(pipeline, "C"); pipeline.addLast(new HttpResponseDecoder()); addLoggingHandler(pipeline, "D"); @@ -177,13 +188,18 @@ private static void addLoggingHandler(ChannelPipeline pipeline, String name) { } private void deactivateChannel() { - var pipeline = channel.pipeline(); - log.atDebug().setMessage(()->"Resetting the pipeline currently at: " + pipeline).log(); - while (!(pipeline.last() instanceof SslHandler) && (pipeline.last() != null)) { - pipeline.removeLast(); + try { + var pipeline = channel.pipeline(); + log.atDebug().setMessage(() -> "Resetting the pipeline currently at: " + pipeline).log(); + while (!(pipeline.last() instanceof SslHandler) && (pipeline.last() != null)) { + pipeline.removeLast(); + } + channel.config().setAutoRead(false); + log.atDebug().setMessage(() -> "Reset the pipeline back to: " + pipeline).log(); + } finally { + currentRequestContext.close(); + parentContext.close(); } - channel.config().setAutoRead(false); - log.atDebug().setMessage(()->"Reset the pipeline back to: " + pipeline).log(); } @Override @@ -194,8 +210,9 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa System.identityHashCode(packetData) + ")").log(); return writePacketAndUpdateFuture(packetData); } else { - log.atWarn().setMessage(()->tracingContext.getReplayerRequestKey() + "outbound channel was not set " + - "up successfully, NOT writing bytes hash=" + System.identityHashCode(packetData)).log(); + log.atWarn().setMessage(()-> httpContext().getReplayerRequestKey() + + "outbound channel was not set up successfully, NOT writing bytes hash=" + + System.identityHashCode(packetData)).log(); channel.close(); return DiagnosticTrackableCompletableFuture.Factory.failedFuture(channelInitException, ()->""); } @@ -205,18 +222,22 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa return activeChannelFuture; } + private IContexts.IReplayerHttpTransactionContext httpContext() { + return parentContext.getLogicalEnclosingScope(); + } + private DiagnosticTrackableCompletableFuture writePacketAndUpdateFuture(ByteBuf packetData) { final var completableFuture = new DiagnosticTrackableCompletableFuture(new CompletableFuture<>(), ()->"CompletableFuture that will wait for the netty future to fill in the completion value"); final int readableBytes = packetData.readableBytes(); - METERING_CLOSURE.meterIncrementEvent(tracingContext, "readBytes", packetData.readableBytes()); + METERING_CLOSURE.meterIncrementEvent(currentRequestContext, "readBytes", packetData.readableBytes()); channel.writeAndFlush(packetData) .addListener((ChannelFutureListener) future -> { Throwable cause = null; try { if (!future.isSuccess()) { - log.atWarn().setMessage(()->tracingContext.getReplayerRequestKey() + "closing outbound channel " + + log.atWarn().setMessage(()-> httpContext().getReplayerRequestKey() + "closing outbound channel " + "because WRITE future was not successful " + future.cause() + " hash=" + System.identityHashCode(packetData) + " will be sending the exception to " + completableFuture).log(); @@ -235,9 +256,11 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa " an 
exception :" + packetData + " hash=" + System.identityHashCode(packetData)).log(); metricsLogger.atError(MetricsEvent.WRITING_REQUEST_COMPONENT_FAILED, cause) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()) - .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getReplayerRequestKey().toString()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, + httpContext().getReplayerRequestKey().toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, - tracingContext.getReplayerRequestKey().getTrafficStreamKey().getConnectionId()).emit(); + httpContext().getReplayerRequestKey().getTrafficStreamKey().getConnectionId()) + .emit(); completableFuture.future.completeExceptionally(cause); channel.close(); } @@ -246,9 +269,8 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa ". Created future for writing data="+completableFuture).log(); metricsLogger.atSuccess(MetricsEvent.WROTE_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()) - .setAttribute(MetricsAttributeKey.REQUEST_ID, tracingContext.getReplayerRequestKey()) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, - tracingContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.REQUEST_ID, httpContext().getReplayerRequestKey()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, httpContext().getConnectionId()) .setAttribute(MetricsAttributeKey.SIZE_IN_BYTES, readableBytes).emit(); return completableFuture; } @@ -257,13 +279,19 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa public DiagnosticTrackableCompletableFuture finalizeRequest() { var ff = activeChannelFuture.getDeferredFutureThroughHandle((v,t)-> { + this.currentRequestContext.close(); + this.currentRequestContext = new Contexts.WaitingForHttpResponseContext(parentContext, + new SimpleMeteringClosure("RSO").makeSpanContinuation("waitingForResponse")); + var future = new CompletableFuture(); var rval = new DiagnosticTrackableCompletableFuture(future, ()->"NettyPacketToHttpConsumer.finalizeRequest()"); if (t == null) { var responseWatchHandler = (BacksideHttpWatcherHandler) channel.pipeline().get(BACKSIDE_HTTP_WATCHER_HANDLER_NAME); - responseWatchHandler.addCallback(future::complete); + responseWatchHandler.addCallback(value -> { + future.complete(value); + }); } else { future.complete(responseBuilder.addErrorCause(t).build()); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 8fc931bb1..2472ebeb0 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -75,6 +75,14 @@ public int hashCode() { } } + public static class TouchScopeContext extends DirectNestedSpanContext { + public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope, + @NonNull ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + public static class PollScopeContext extends DirectNestedSpanContext { public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope, @NonNull ISpanWithParentGenerator spanGenerator) { @@ -91,6 +99,14 @@ public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope, } } + public static class 
KafkaCommitScopeContext extends DirectNestedSpanContext { + public KafkaCommitScopeContext(@NonNull CommitScopeContext enclosingScope, + @NonNull ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + /** * The keep-alive should already be set to a fraction of the max poll timeout for * the consumer (done outside of this class). The keep-alive tells this class how @@ -188,24 +204,28 @@ public Optional getNextRequiredTouch() { } public void touch(IInstrumentationAttributes context) { - log.trace("touch() called."); - pause(); - try { - var records = kafkaConsumer.poll(Duration.ZERO); - if (!records.isEmpty()) { - throw new IllegalStateException("Expected no entries once the consumer was paused. " + - "This may have happened because a new assignment slipped into the consumer AFTER pause calls."); + try (var touchCtx = new TouchScopeContext(context, + METERING_CLOSURE.makeSpanContinuation("touch"))) { + log.trace("touch() called."); + pause(); + try (var pollCtx = new PollScopeContext(touchCtx, + METERING_CLOSURE.makeSpanContinuation("kafkaPoll"))) { + var records = kafkaConsumer.poll(Duration.ZERO); + if (!records.isEmpty()) { + throw new IllegalStateException("Expected no entries once the consumer was paused. " + + "This may have happened because a new assignment slipped into the consumer AFTER pause calls."); + } + } catch (IllegalStateException e) { + throw e; + } catch (RuntimeException e) { + log.atWarn().setCause(e).setMessage("Unable to poll the topic: " + topic + " with our Kafka consumer. " + + "Swallowing and awaiting next metadata refresh to try again.").log(); + } finally { + resume(); } - } catch (IllegalStateException e) { - throw e; - } catch (RuntimeException e) { - log.atWarn().setCause(e).setMessage("Unable to poll the topic: " + topic + " with our Kafka consumer. 
" + - "Swallowing and awaiting next metadata refresh to try again.").log(); - } finally { - resume(); + safeCommit(context); + lastTouchTimeRef.set(clock.instant()); } - safeCommit(context); - lastTouchTimeRef.set(clock.instant()); } private void pause() { @@ -388,7 +408,10 @@ private static void safeCommitStatic(CommitScopeContext context, Consumer nextCommitsMap) { assert !nextCommitsMap.isEmpty(); log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); - kafkaConsumer.commitSync(nextCommitsMap); + try (var kafkaContext = new KafkaCommitScopeContext(context, + METERING_CLOSURE.makeSpanContinuation("kafkaCommit"));) { + kafkaConsumer.commitSync(nextCommitsMap); + } } private static void callbackUpTo(java.util.function.Consumer onCommitKeyCallback, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java index 730bd58ee..8dccefcc6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java @@ -13,10 +13,13 @@ public class BacksideSnifferHandler extends ChannelInboundHandlerAdapter { private final AggregatedRawResponse.Builder aggregatedRawResponseBuilder; + private Runnable firstByteReceivedCallback; private static final MetricsLogger metricsLogger = new MetricsLogger("BacksideSnifferHandler"); - public BacksideSnifferHandler(AggregatedRawResponse.Builder aggregatedRawResponseBuilder) { + public BacksideSnifferHandler(AggregatedRawResponse.Builder aggregatedRawResponseBuilder, + Runnable firstByteReceivedCallback) { this.aggregatedRawResponseBuilder = aggregatedRawResponseBuilder; + this.firstByteReceivedCallback = firstByteReceivedCallback; } @Override @@ -28,6 +31,10 @@ public void channelActive(ChannelHandlerContext ctx) { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { var bb = (ByteBuf) msg; + if (firstByteReceivedCallback != null && bb.readableBytes() > 0) { + firstByteReceivedCallback.run(); + firstByteReceivedCallback = null; + } byte[] output = new byte[bb.readableBytes()]; bb.readBytes(output); aggregatedRawResponseBuilder.addResponsePacket(output); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java index d8f111dc3..2fff0f2f5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java @@ -118,36 +118,56 @@ public RequestTransformationContext(IContexts.IReplayerHttpTransactionContext en } } - public static class WaitingForHttpResponseContext + public static class ScheduledContext extends DirectNestedSpanContext - implements IContexts.IWaitingForHttpResponseContext { - public WaitingForHttpResponseContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + implements IContexts.IScheduledContext { + public ScheduledContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { super(enclosingScope); setCurrentSpan(spanGenerator); } } - public static 
class ReceivingHttpResponseContext + public static class TargetRequestContext extends DirectNestedSpanContext - implements IContexts.IReceivingHttpResponseContext { - public ReceivingHttpResponseContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + implements IContexts.ITargetRequestContext { + public TargetRequestContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { super(enclosingScope); setCurrentSpan(spanGenerator); } } public static class RequestSendingContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IContexts.IRequestSendingContext { - public RequestSendingContext(IContexts.IReplayerHttpTransactionContext enclosingScope, + public RequestSendingContext(IContexts.ITargetRequestContext enclosingScope, ISpanWithParentGenerator spanGenerator) { super(enclosingScope); setCurrentSpan(spanGenerator); } } + public static class WaitingForHttpResponseContext + extends DirectNestedSpanContext + implements IContexts.IWaitingForHttpResponseContext { + public WaitingForHttpResponseContext(IContexts.ITargetRequestContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + + public static class ReceivingHttpResponseContext + extends DirectNestedSpanContext + implements IContexts.IReceivingHttpResponseContext { + public ReceivingHttpResponseContext(IContexts.ITargetRequestContext enclosingScope, + ISpanWithParentGenerator spanGenerator) { + super(enclosingScope); + setCurrentSpan(spanGenerator); + } + } + public static class TupleHandlingContext extends DirectNestedSpanContext implements IContexts.ITupleHandlingContext { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java index 70d297f85..24df98e07 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java @@ -65,14 +65,20 @@ public interface IResponseAccumulationContext public interface IRequestTransformationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public interface IWaitingForHttpResponseContext + public interface IScheduledContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } - public interface IReceivingHttpResponseContext + public interface ITargetRequestContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public interface IRequestSendingContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + + public interface IWaitingForHttpResponseContext + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + + public interface IReceivingHttpResponseContext + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } public interface ITupleHandlingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 
a38e2b019..2aac42f2b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -164,8 +164,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { blockContext = new BackPressureBlockContext(readContext, METERING_CLOSURE.makeSpanContinuation("backPressureBlock")); } - try (var waitContext = new WaitForNextSignal(blockContext, - METERING_CLOSURE.makeSpanContinuation("waitForNextBackPressureCheck"))) { + try { log.atInfo().setMessage("blocking until signaled to read the next chunk last={} stop={}") .addArgument(lastTimestampSecondsRef.get()) .addArgument(stopReadingAtRef.get()) @@ -173,7 +172,10 @@ private Void blockIfNeeded(ReadChunkContext readContext) { var nextTouchOp = underlyingSource.getNextRequiredTouch(); if (nextTouchOp.isEmpty()) { log.trace("acquiring readGate semaphore (w/out timeout)"); - readGate.acquire(); + try (var waitContext = new WaitForNextSignal(blockContext, + METERING_CLOSURE.makeSpanContinuation("waitForNextBackPressureCheck"))) { + readGate.acquire(); + } } else { var nextInstant = nextTouchOp.get(); final var nowTime = Instant.now(); @@ -181,12 +183,15 @@ private Void blockIfNeeded(ReadChunkContext readContext) { log.atDebug().setMessage(() -> "Next touch at " + nextInstant + " ... in " + waitIntervalMs + "ms (now=" + nowTime + ")").log(); if (waitIntervalMs <= 0) { - underlyingSource.touch(waitContext); + underlyingSource.touch(blockContext); } else { // if this doesn't succeed, we'll loop around & likely do a touch, then loop around again. // if it DOES succeed, we'll loop around and make sure that there's not another reason to stop log.atTrace().setMessage(() -> "acquring readGate semaphore with timeout=" + waitIntervalMs).log(); - readGate.tryAcquire(waitIntervalMs, TimeUnit.MILLISECONDS); + try (var waitContext = new WaitForNextSignal(blockContext, + METERING_CLOSURE.makeSpanContinuation("waitForNextBackPressureCheck"))) { + readGate.tryAcquire(waitIntervalMs, TimeUnit.MILLISECONDS); + } } } } catch (InterruptedException e) { From bef99447efd4ffcfd071e0b08882eff1b77b78dd Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 21 Dec 2023 22:04:51 -0500 Subject: [PATCH 34/94] Test bugfix. toString() wasn't threadsafe. Now it is. 
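The change below routes toString() through the pool's event loop so the pool's
state is only read on the thread that owns it. Roughly the pattern being applied,
shown here as an illustrative sketch only (the class, field, and method names are
invented for the example and are not the pool's real ones):

    import io.netty.channel.EventLoop;
    import io.netty.channel.nio.NioEventLoopGroup;
    import java.util.concurrent.ExecutionException;

    class EventLoopConfinedCounter {
        private final EventLoop eventLoop;
        private int count; // only ever touched on eventLoop, so no locks are needed

        EventLoopConfinedCounter(EventLoop eventLoop) {
            this.eventLoop = eventLoop;
        }

        void increment() {
            eventLoop.execute(() -> count++); // all mutation happens on the owning loop
        }

        @Override
        public String toString() {
            if (eventLoop.inEventLoop()) {
                return render(); // already on the owning thread; read directly
            }
            try {
                // hop onto the owning thread and wait for a consistent snapshot
                return eventLoop.submit(this::render).get();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return "EventLoopConfinedCounter{interrupted}";
            } catch (ExecutionException e) {
                return "EventLoopConfinedCounter{error=" + e.getCause() + "}";
            }
        }

        private String render() {
            return "EventLoopConfinedCounter{count=" + count + "}";
        }

        public static void main(String[] args) throws Exception {
            var group = new NioEventLoopGroup(1);
            var counter = new EventLoopConfinedCounter(group.next());
            counter.increment();
            System.out.println(counter); // safe to call from any thread
            group.shutdownGracefully().sync();
        }
    }

In the sketch, checking inEventLoop() before blocking matters: calling get() on a
task submitted to the loop you are currently running on cannot complete (Netty
typically rejects such blocking calls with a BlockingOperationException), so
callers already on the loop take the direct, non-blocking path.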
Signed-off-by: Greg Schohn --- .../proxyserver/netty/ExpiringSubstitutableItemPool.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPool.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPool.java index 34dd2a5f8..46a20b517 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPool.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPool.java @@ -303,7 +303,12 @@ private void beginLoadingNewItemIfNecessary() { } @Override + @SneakyThrows public String toString() { + return eventLoop.submit(() -> toStringOnThread()).get(); + } + + private String toStringOnThread() { final StringBuilder sb = new StringBuilder("ExpiringSubstitutableItemPool{"); sb.append("poolSize=").append(poolSize); if (eventLoop.inEventLoop()) { From 8b50c89e758fc4318e66f4b8aaa18006af2f0a59 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 21 Dec 2023 22:12:06 -0500 Subject: [PATCH 35/94] Refactoring and code consolidation around context management. The static initializer race conditions have been resolved (no more static otel for tracing and metering) and the E2E solution is functional again. Signed-off-by: Greg Schohn --- .../captureKafkaOffloader/build.gradle | 1 + .../kafkaoffloader/KafkaCaptureFactory.java | 3 +- .../tracing/KafkaRecordContext.java | 20 +-- .../KafkaCaptureFactoryTest.java | 5 +- .../tracing/ConnectionContext.java | 16 +-- TrafficCapture/coreUtilities/build.gradle | 5 + .../tracing/AbstractNestedSpanContext.java | 20 +-- .../tracing/DirectNestedSpanContext.java | 7 +- .../migrations/tracing/EmptyContext.java | 20 --- .../tracing/IInstrumentConstructor.java | 8 ++ .../tracing/IInstrumentationAttributes.java | 4 + .../IScopedInstrumentationAttributes.java | 15 ++- .../tracing/IndirectNestedSpanContext.java | 13 ++ .../migrations/tracing/RootOtelContext.java | 123 ++++++++++++++++++ .../tracing/SimpleMeteringClosure.java | 65 --------- .../migrations/tracing/TestContext.java | 21 +++ ...nditionallyReliableLoggingHttpHandler.java | 6 +- .../netty/LoggingHttpHandler.java | 38 ++---- .../netty/tracing/HttpMessageContext.java | 39 ++++-- ...ionallyReliableLoggingHttpHandlerTest.java | 13 +- .../proxyserver/CaptureProxy.java | 7 +- .../netty/NettyScanningHttpProxy.java | 6 +- .../netty/ProxyChannelInitializer.java | 15 ++- .../netty/NettyScanningHttpProxyTest.java | 5 +- TrafficCapture/trafficReplayer/build.gradle | 2 + .../replay/RequestResponsePacketPair.java | 9 +- .../replay/RequestSenderOrchestrator.java | 9 +- .../replay/TrafficCaptureSourceFactory.java | 5 +- .../migrations/replay/TrafficReplayer.java | 14 +- .../NettyPacketToHttpConsumer.java | 12 +- .../http/HttpJsonTransformingConsumer.java | 3 +- .../kafka/KafkaTrafficCaptureSource.java | 3 +- .../replay/kafka/TrackingKafkaConsumer.java | 43 +++--- .../TrafficStreamKeyWithKafkaRecordId.java | 9 +- .../replay/tracing/ChannelContextManager.java | 15 ++- .../replay/tracing/ChannelKeyContext.java | 19 +-- .../migrations/replay/tracing/Contexts.java | 68 ++++------ .../tracing/IndirectNestedSpanContext.java | 14 -- .../traffic/source/BlockingTrafficSource.java | 45 ++----- 
.../traffic/source/InputStreamOfTraffic.java | 12 +- .../replay/BlockingTrafficSourceTest.java | 7 +- .../CompressedFileTrafficCaptureSource.java | 9 +- .../replay/FullTrafficReplayerTest.java | 1 + .../KafkaRestartingTrafficReplayerTest.java | 8 +- ...afficToHttpTransactionAccumulatorTest.java | 5 +- .../replay/TrafficReplayerRunner.java | 1 + .../replay/TrafficReplayerTest.java | 7 +- .../replay/V0_1TrafficCaptureSource.java | 5 +- .../KafkaCommitsWorkBetweenLongPolls.java | 4 +- .../replay/kafka/KafkaKeepAliveTests.java | 4 +- ...KafkaTrafficCaptureSourceLongTermTest.java | 2 +- .../kafka/KafkaTrafficCaptureSourceTest.java | 5 +- .../migrations/replay/TestContext.java | 23 ---- .../migrations/replay/TestRequestKey.java | 3 +- .../TestTrafficStreamsLifecycleContext.java | 8 +- 55 files changed, 419 insertions(+), 430 deletions(-) rename TrafficCapture/{trafficReplayer/src/main/java/org/opensearch/migrations/replay => coreUtilities/src/main/java/org/opensearch/migrations}/tracing/AbstractNestedSpanContext.java (56%) rename TrafficCapture/{trafficReplayer/src/main/java/org/opensearch/migrations/replay => coreUtilities/src/main/java/org/opensearch/migrations}/tracing/DirectNestedSpanContext.java (56%) delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java create mode 100644 TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index ab99a0327..38005eab0 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -22,6 +22,7 @@ dependencies { implementation group: 'software.amazon.msk', name:'aws-msk-iam-auth', version:'1.1.9' testImplementation project(':captureProtobufs') + testImplementation testFixtures(project(path: ':coreUtilities')) testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index bf502596c..3e145c248 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -128,7 +128,6 @@ public CodedOutputStreamWrapper createStream() { log.debug("Sending Kafka producer 
record: {} for topic: {}", recordId, topicNameForTraffic); var flushContext = new KafkaRecordContext(telemetryContext, - METERING_CLOSURE.makeSpanContinuation("flushRecord"), topicNameForTraffic, recordId, kafkaRecord.value().length); METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); @@ -173,7 +172,7 @@ private Callback handleProducerRecordSent(CompletableFuture cf, METERING_CLOSURE.meterIncrementEvent(flushContext, exception==null ? "stream_flush_success_bytes" : "stream_flush_failure_bytes", flushContext.getRecordSize()); - flushContext.currentSpan.end(); + flushContext.close(); if (exception != null) { log.error("Error sending producer record: {}", recordId, exception); diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index e0edb8b2a..bc2742e8d 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -5,6 +5,8 @@ import io.opentelemetry.api.trace.Span; import lombok.AllArgsConstructor; import lombok.Getter; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; @@ -12,18 +14,12 @@ import java.time.Instant; -@AllArgsConstructor -public class KafkaRecordContext implements IScopedInstrumentationAttributes, IWithStartTime { +public class KafkaRecordContext extends DirectNestedSpanContext + implements IScopedInstrumentationAttributes, IWithStartTime { static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); static final AttributeKey RECORD_SIZE_ATTR = AttributeKey.longKey("recordSize"); - @Getter - public final IConnectionContext enclosingScope; - @Getter - public final Span currentSpan; - @Getter - public final Instant startTime; @Getter public final String topic; @Getter @@ -31,14 +27,12 @@ public class KafkaRecordContext implements IScopedInstrumentationAttributes, IWi @Getter public final int recordSize; - public KafkaRecordContext(IConnectionContext enclosingScope, ISpanWithParentGenerator incomingSpan, - String topic, String recordId, int recordSize) { - this.enclosingScope = enclosingScope; + public KafkaRecordContext(IConnectionContext enclosingScope, String topic, String recordId, int recordSize) { + super(enclosingScope); this.topic = topic; this.recordId = recordId; this.recordSize = recordSize; - this.startTime = Instant.now(); - currentSpan = incomingSpan.apply(this.getPopulatedAttributes(), enclosingScope.getCurrentSpan()); + setCurrentSpan("KafkaCapture", "stream_flush_called"); } @Override diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 
403bf9ef1..084b386f5 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -19,7 +19,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.migrations.tracing.EmptyContext; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; @@ -77,8 +77,7 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, } private static ConnectionContext createCtx() { - return new ConnectionContext("test", "test", - x->GlobalOpenTelemetry.getTracer("test").spanBuilder("test").startSpan()); + return new ConnectionContext(new RootOtelContext(), "test", "test"); } /** diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index c6d3bc5f0..b836665d0 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; @@ -18,18 +19,15 @@ public class ConnectionContext implements IConnectionContext, IWithStartTime { public final Span currentSpan; @Getter private final Instant startTime; + @Getter + final IInstrumentConstructor rootInstrumentationScope; - public ConnectionContext(IConnectionContext oldContext, ISpanWithParentGenerator spanGenerator) { - this.connectionId = oldContext.getConnectionId(); - this.nodeId = oldContext.getNodeId(); - this.startTime = Instant.now(); - this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), oldContext.getCurrentSpan()); - } - - public ConnectionContext(String connectionId, String nodeId, ISpanGenerator spanGenerator) { + public ConnectionContext(IInstrumentConstructor rootInstrumentationScope, + String connectionId, String nodeId) { + this.rootInstrumentationScope = rootInstrumentationScope; this.connectionId = connectionId; this.nodeId = nodeId; - this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); + this.currentSpan = rootInstrumentationScope.buildSpanWithoutParent("","connectionLifetime"); this.startTime = Instant.now(); } } diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index fbe9a0307..16ed03c38 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -22,6 +22,7 @@ plugins { // id 'checkstyle' id "io.freefair.lombok" version "8.0.1" id 'java' + id 'java-test-fixtures' } //spotbugs { @@ -66,6 +67,10 @@ dependencies { // OpenTelemetry log4j appender 
implementation("io.opentelemetry.instrumentation:opentelemetry-log4j-appender-2.17:1.30.0-alpha") + + testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' + testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk' } configurations.all { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java similarity index 56% rename from TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java rename to TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java index 7311c7d9a..4901d0e3e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java @@ -1,12 +1,8 @@ -package org.opensearch.migrations.replay.tracing; +package org.opensearch.migrations.tracing; import io.opentelemetry.api.trace.Span; import lombok.Getter; import lombok.NonNull; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.IWithStartTime; import java.time.Instant; @@ -15,10 +11,12 @@ public abstract class AbstractNestedSpanContext extends AbstractNestedSpanContext diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java deleted file mode 100644 index 9d335bfe0..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/EmptyContext.java +++ /dev/null @@ -1,20 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.trace.Span; - -public class EmptyContext implements IInstrumentationAttributes { - public static final EmptyContext singleton = new EmptyContext(); - - private EmptyContext() {} - - @Override - public IInstrumentationAttributes getEnclosingScope() { - return null; - } - - @Override - public AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder; // nothing more to do - } -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java new file mode 100644 index 000000000..32a7505cb --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -0,0 +1,8 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.trace.Span; + +public interface IInstrumentConstructor { + Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName); + Span buildSpanWithoutParent(String scopeName, String spanName); +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index b18cb8104..c61e40141 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -2,11 +2,15 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.Span; +import lombok.NonNull; import java.util.ArrayList; public interface IInstrumentationAttributes { IInstrumentationAttributes getEnclosingScope(); + @NonNull IInstrumentConstructor getRootInstrumentationScope(); + default Span getCurrentSpan() { return null; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 4d14a44f4..bbcb8d9af 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -1,17 +1,18 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; -import lombok.extern.slf4j.Slf4j; +import lombok.NonNull; -import java.util.ArrayList; +public interface IScopedInstrumentationAttributes extends IInstrumentationAttributes, AutoCloseable { -public interface IScopedInstrumentationAttributes extends IInstrumentationAttributes { - - Span getCurrentSpan(); + @Override + @NonNull Span getCurrentSpan(); default void endSpan() { getCurrentSpan().end(); } + + default void close() { + endSpan(); + } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java new file mode 100644 index 000000000..741322e10 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java @@ -0,0 +1,13 @@ +package org.opensearch.migrations.tracing; + +import lombok.NonNull; + +public abstract class IndirectNestedSpanContext + + extends AbstractNestedSpanContext { + public IndirectNestedSpanContext(@NonNull D enclosingScope) { + super(enclosingScope); + } + + public abstract L getLogicalEnclosingScope(); +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java new file mode 100644 index 000000000..e2ad60eb7 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -0,0 +1,123 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.context.Context; +import 
io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; +import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.logs.SdkLoggerProvider; +import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; +import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import lombok.Getter; + +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +public class RootOtelContext implements IInstrumentationAttributes, IInstrumentConstructor { + @Getter private final OpenTelemetry openTelemetrySdk; + + public static OpenTelemetry initializeOpenTelemetry(String serviceName, String collectorEndpoint) { + var serviceResource = Resource.getDefault().toBuilder() + .put(ResourceAttributes.SERVICE_NAME, serviceName) + .build(); + + OpenTelemetrySdk openTelemetrySdk = + OpenTelemetrySdk.builder() + .setLoggerProvider( + SdkLoggerProvider.builder() + .setResource(serviceResource) + .addLogRecordProcessor( + BatchLogRecordProcessor.builder( + OtlpGrpcLogRecordExporter.builder() + .setEndpoint(collectorEndpoint) + .build()) + .build()) + .build()) + .setTracerProvider( + SdkTracerProvider.builder() + .setResource(serviceResource) + .addSpanProcessor( + BatchSpanProcessor.builder( + OtlpGrpcSpanExporter.builder() + .setEndpoint(collectorEndpoint) + .setTimeout(2, TimeUnit.SECONDS) + .build()) + .setScheduleDelay(100, TimeUnit.MILLISECONDS) + .build()) + .build()) + .setMeterProvider( + SdkMeterProvider.builder() + .setResource(serviceResource) + .registerMetricReader( + PeriodicMetricReader.builder( + OtlpGrpcMetricExporter.builder() + .setEndpoint(collectorEndpoint) + .build()) + .setInterval(Duration.ofMillis(1000)) + .build()) + .build()) + .build(); + + // Add hook to close SDK, which flushes logs + Runtime.getRuntime().addShutdownHook(new Thread(openTelemetrySdk::close)); + return openTelemetrySdk; + } + + public RootOtelContext() { + this(null, null); + } + + public RootOtelContext(String collectorEndpoint, String serviceName) { + this(Optional.ofNullable(collectorEndpoint) + .map(endpoint-> initializeOpenTelemetry(serviceName, endpoint)) + .orElse(OpenTelemetrySdk.builder().build())); + } + + public RootOtelContext(OpenTelemetry sdk) { + openTelemetrySdk = sdk; + } + + @Override + public IInstrumentationAttributes getEnclosingScope() { + return null; + } + + @Override + public IInstrumentConstructor getRootInstrumentationScope() { + return this; + } + + @Override + public AttributesBuilder fillAttributes(AttributesBuilder builder) { + return builder; // nothing more to do + } + + public static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan) { + return Optional.ofNullable(parentSpan).map(p -> builder.setParent(Context.current().with(p))) + .orElseGet(builder::setNoParent) + .startSpan().setAllAttributes(attrs); + } + + @Override + public Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName) { + var parentSpan = enclosingScope.getCurrentSpan(); + var spanBuilder = getOpenTelemetrySdk().getTracer(scopeName).spanBuilder(spanName); + return 
buildSpanWithParent(spanBuilder, getPopulatedAttributes(), parentSpan); + } + + public Span buildSpanWithoutParent(String scopeName, String spanName) { + var spanBuilder = getOpenTelemetrySdk().getTracer(scopeName).spanBuilder(spanName); + return buildSpanWithParent(spanBuilder, getPopulatedAttributes(), null); + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java index 0a7f4d8c6..00539c58f 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java @@ -27,58 +27,9 @@ public class SimpleMeteringClosure { public final Meter meter; - public final Tracer tracer; public SimpleMeteringClosure(String scopeName) { meter = GlobalOpenTelemetry.getMeter(scopeName); - tracer = GlobalOpenTelemetry.getTracer(scopeName); - } - - public static void initializeOpenTelemetry(String serviceName, String collectorEndpoint) { - var serviceResource = Resource.getDefault().toBuilder() - .put(ResourceAttributes.SERVICE_NAME, serviceName) - .build(); - - OpenTelemetrySdk openTelemetrySdk = - OpenTelemetrySdk.builder() - .setLoggerProvider( - SdkLoggerProvider.builder() - .setResource(serviceResource) - .addLogRecordProcessor( - BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.builder() - .setEndpoint(collectorEndpoint) - .build()) - .build()) - .build()) - .setTracerProvider( - SdkTracerProvider.builder() - .setResource(serviceResource) - .addSpanProcessor( - BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.builder() - .setEndpoint(collectorEndpoint) - .setTimeout(2, TimeUnit.SECONDS) - .build()) - .setScheduleDelay(100, TimeUnit.MILLISECONDS) - .build()) - .build()) - .setMeterProvider( - SdkMeterProvider.builder() - .setResource(serviceResource) - .registerMetricReader( - PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.builder() - .setEndpoint(collectorEndpoint) - .build()) - .setInterval(Duration.ofMillis(1000)) - .build()) - .build()) - .buildAndRegisterGlobal(); - - // Add hook to close SDK, which flushes logs - Runtime.getRuntime().addShutdownHook(new Thread(openTelemetrySdk::close)); - //OpenTelemetryAppender.install(GlobalOpenTelemetry.get()); } public void meterIncrementEvent(IInstrumentationAttributes ctx, String eventName) { @@ -132,20 +83,4 @@ public void meterHistogram(IInstrumentationAttributes ctx, String eventName, Str .put("labelName", eventName) .build()); } - - public ISpanGenerator makeSpanContinuation(String spanName, Span parentSpan) { - var builder = tracer.spanBuilder(spanName); - return (attrs) -> getSpanWithParent(builder, attrs, parentSpan); - } - - public static Span getSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan) { - return Optional.ofNullable(parentSpan).map(p -> builder.setParent(Context.current().with(p))) - .orElseGet(builder::setNoParent) - .startSpan().setAllAttributes(attrs); - } - - public ISpanWithParentGenerator makeSpanContinuation(String spanName) { - var builder = tracer.spanBuilder(spanName); - return (attrs,parentSpan) -> getSpanWithParent(builder, attrs, parentSpan); - } } diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java 
new file mode 100644 index 000000000..f68b9fff6 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -0,0 +1,21 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.trace.Span; +import lombok.Getter; + +public class TestContext implements IScopedInstrumentationAttributes { + public static final TestContext singleton = new TestContext(); + + @Override + public IInstrumentationAttributes getEnclosingScope() { + return null; + } + + @Getter public IInstrumentConstructor rootInstrumentationScope = new RootOtelContext(); + + @Getter + public Span currentSpan; + public TestContext() { + currentSpan = new RootOtelContext().buildSpanWithoutParent("testScope", "testSpan"); + } +} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index 518f368f8..1201b7f5b 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -6,6 +6,7 @@ import lombok.Lombok; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; @@ -16,12 +17,13 @@ public class ConditionallyReliableLoggingHttpHandler extends LoggingHttpHandler { private final Predicate shouldBlockPredicate; - public ConditionallyReliableLoggingHttpHandler(@NonNull String nodeId, String connectionId, + public ConditionallyReliableLoggingHttpHandler(@NonNull IInstrumentConstructor contextConstructor, + @NonNull String nodeId, String connectionId, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate requestCapturePredicate, @NonNull Predicate headerPredicateForWhenToBlock) throws IOException { - super(nodeId, connectionId, trafficOffloaderFactory, requestCapturePredicate); + super(contextConstructor, nodeId, connectionId, trafficOffloaderFactory, requestCapturePredicate); this.shouldBlockPredicate = headerPredicateForWhenToBlock; } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index c8dd2d888..085619e87 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -21,6 +21,8 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; +import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import 
org.opensearch.migrations.coreutils.MetricsLogger; @@ -36,10 +38,6 @@ public class LoggingHttpHandler extends ChannelDuplexHandler { public static final String TELEMETRY_SCOPE_NAME = "CapturingHttpHandler"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); - public static final String GATHERING_REQUEST = "gatheringRequest"; - public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; - public static final String GATHERING_RESPONSE = "gatheringResponse"; - public static final String BLOCKED = "blocked"; static class CaptureIgnoreState { static final byte CAPTURE = 0; @@ -143,15 +141,13 @@ public HttpRequest resetCurrentRequest() { protected HttpMessageContext messageContext; - public LoggingHttpHandler(String nodeId, String channelKey, + public LoggingHttpHandler(@NonNull IInstrumentConstructor contextConstructor, String nodeId, String channelKey, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate httpHeadersCapturePredicate) throws IOException { - var parentContext = new ConnectionContext(channelKey, nodeId, - METERING_CLOSURE.makeSpanContinuation("connectionLifetime", null)); + var parentContext = new ConnectionContext(contextConstructor, channelKey, nodeId); - this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.HttpTransactionState.REQUEST, - METERING_CLOSURE.makeSpanContinuation(GATHERING_REQUEST)); + this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.HttpTransactionState.REQUEST); METERING_CLOSURE.meterIncrementEvent(messageContext, "requestStarted"); this.trafficOffloader = trafficOffloaderFactory.createOffloader(parentContext, channelKey); @@ -162,27 +158,11 @@ public LoggingHttpHandler(String nodeId, String channelKey, ); } - static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state) { - switch (state) { - case REQUEST: - return GATHERING_REQUEST; - case INTERNALLY_BLOCKED: - return BLOCKED; - case WAITING: - return WAITING_FOR_RESPONSE; - case RESPONSE: - return GATHERING_RESPONSE; - default: - throw new IllegalStateException("Unknown enum value: "+state); - } - } - protected void rotateNextMessageContext(HttpMessageContext.HttpTransactionState nextState) { - messageContext = new HttpMessageContext(messageContext.getEnclosingScope(), + messageContext = new HttpMessageContext(messageContext.getLogicalEnclosingScope(), (nextState== HttpMessageContext.HttpTransactionState.REQUEST ? 
1 : 0) + messageContext.getSourceRequestIndex(), - nextState, - METERING_CLOSURE.makeSpanContinuation(getSpanLabelForState(nextState))); + nextState); } private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @@ -210,8 +190,8 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { METERING_CLOSURE.meterIncrementEvent(messageContext, "handlerRemoved"); - messageContext.endSpan(); - messageContext.getEnclosingScope().currentSpan.end(); + messageContext.close(); + messageContext.getLogicalEnclosingScope().close(); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index 1920ee567..77a64f651 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -2,6 +2,9 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; @@ -10,7 +13,14 @@ import java.time.Instant; -public class HttpMessageContext implements IHttpTransactionContext, IWithStartTimeAndAttributes { +public class HttpMessageContext extends DirectNestedSpanContext + implements IHttpTransactionContext, IWithStartTimeAndAttributes { + + public static final String GATHERING_REQUEST = "gatheringRequest"; + public static final String BLOCKED = "blocked"; + public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; + public static final String GATHERING_RESPONSE = "gatheringResponse"; + public enum HttpTransactionState { REQUEST, INTERNALLY_BLOCKED, @@ -21,20 +31,31 @@ public enum HttpTransactionState { @Getter final long sourceRequestIndex; @Getter - final ConnectionContext enclosingScope; - @Getter final Instant startTime; @Getter final HttpTransactionState state; - @Getter - final Span currentSpan; - public HttpMessageContext(ConnectionContext enclosingScope, long sourceRequestIndex, HttpTransactionState state, - ISpanWithParentGenerator spanGenerator) { + static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state) { + switch (state) { + case REQUEST: + return GATHERING_REQUEST; + case INTERNALLY_BLOCKED: + return BLOCKED; + case WAITING: + return WAITING_FOR_RESPONSE; + case RESPONSE: + return GATHERING_RESPONSE; + default: + throw new IllegalStateException("Unknown enum value: "+state); + } + } + + + public HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestIndex, HttpTransactionState state) { + super(enclosingScope); this.sourceRequestIndex = sourceRequestIndex; - this.enclosingScope = enclosingScope; this.startTime = Instant.now(); this.state = state; - this.currentSpan = spanGenerator.apply(getPopulatedAttributes(), 
enclosingScope.getCurrentSpan()); + setCurrentSpan("CapturingHttpHandler",getSpanLabelForState(state)); } } diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java index da9c1ea8d..a49183d6b 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java @@ -17,6 +17,7 @@ import org.junit.jupiter.params.provider.ValueSource; import org.opensearch.migrations.testutils.TestUtilities; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamAndByteBufferWrapper; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; @@ -45,8 +46,6 @@ public class ConditionallyReliableLoggingHttpHandlerTest { @RegisterExtension static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); - private final Tracer tracer = otelTesting.getOpenTelemetry().getTracer("test"); - private final Meter meter = otelTesting.getOpenTelemetry().getMeter("test"); static class TestStreamManager extends OrderedStreamLifecyleManager implements AutoCloseable { AtomicReference byteBufferAtomicReference = new AtomicReference<>(); @@ -82,11 +81,13 @@ public CodedOutputStreamAndByteBufferWrapper createStream() { private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer channelWriter) throws IOException { + var rootInstrumenter = new RootOtelContext(otelTesting.getOpenTelemetry()); var streamManager = new TestStreamManager(); var offloader = new StreamChannelConnectionCaptureSerializer("Test", "c", streamManager); EmbeddedChannel channel = new EmbeddedChannel( - new ConditionallyReliableLoggingHttpHandler("n", "c", (ctx, connectionId) -> offloader, + new ConditionallyReliableLoggingHttpHandler(rootInstrumenter, + "n", "c", (ctx, connectionId) -> offloader, new RequestCapturePredicate(), x->true)); // true: block every request channelWriter.accept(channel); @@ -154,12 +155,13 @@ private static Consumer getSingleByteAtATimeWriter(boolean useP @Test @ValueSource(booleans = {false, true}) public void testThatSuppressedCaptureWorks() throws Exception { + var rootInstrumenter = new RootOtelContext(); var streamMgr = new TestStreamManager(); var offloader = new StreamChannelConnectionCaptureSerializer("Test", "connection", streamMgr); var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", "uploader")); EmbeddedChannel channel = new EmbeddedChannel( - new ConditionallyReliableLoggingHttpHandler("n", "c", + new ConditionallyReliableLoggingHttpHandler(rootInstrumenter,"n", "c", (ctx, connectionId) -> offloader, headerCapturePredicate, x->true)); getWriter(false, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); channel.close(); @@ -178,12 +180,13 @@ public void testThatSuppressedCaptureWorks() throws Exception { @ParameterizedTest @ValueSource(booleans = {false, true}) public void testThatHealthCheckCaptureCanBeSuppressed(boolean 
singleBytes) throws Exception { + var rootInstrumenter = new RootOtelContext(); var streamMgr = new TestStreamManager(); var offloader = new StreamChannelConnectionCaptureSerializer("Test", "connection", streamMgr); var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", ".*uploader.*")); EmbeddedChannel channel = new EmbeddedChannel( - new ConditionallyReliableLoggingHttpHandler("n", "c", + new ConditionallyReliableLoggingHttpHandler(rootInstrumenter,"n", "c", (ctx, connectionId) -> offloader, headerCapturePredicate, x->false)); getWriter(singleBytes, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); channel.writeOutbound(Unpooled.wrappedBuffer("response1".getBytes(StandardCharsets.UTF_8))); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index f641ad41e..c60dd175d 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -17,6 +17,7 @@ import org.apache.kafka.common.config.SaslConfigs; import org.apache.logging.log4j.core.util.NullOutputStream; import org.opensearch.common.settings.Settings; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.FileConnectionCaptureFactory; @@ -303,9 +304,7 @@ public static void main(String[] args) throws InterruptedException, IOException var params = parseArgs(args); var backsideUri = convertStringToUri(params.backsideUriString); - if (params.otelCollectorEndpoint != null) { - SimpleMeteringClosure.initializeOpenTelemetry("capture", params.otelCollectorEndpoint); - } + var rootContext = new RootOtelContext(params.otelCollectorEndpoint, "capture"); var sksOp = Optional.ofNullable(params.sslConfigFilePath) .map(sslConfigFile->new DefaultSecurityKeyStore(getSettings(sslConfigFile), @@ -328,7 +327,7 @@ public static void main(String[] args) throws InterruptedException, IOException }).orElse(null); var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(convertPairListToMap(params.suppressCaptureHeaderPairs)); - proxy.start(backsideConnectionPool, params.numThreads, sslEngineSupplier, + proxy.start(rootContext, backsideConnectionPool, params.numThreads, sslEngineSupplier, getConnectionCaptureFactory(params), headerCapturePredicate); } catch (Exception e) { log.atError().setCause(e).setMessage("Caught exception while setting up the server and rethrowing").log(); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java index e6ed7e28e..4325a27f1 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java +++ 
b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java @@ -8,6 +8,7 @@ import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.util.concurrent.DefaultThreadFactory; import lombok.NonNull; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; @@ -28,7 +29,8 @@ public int getProxyPort() { return proxyPort; } - public void start(BacksideConnectionPool backsideConnectionPool, + public void start(IInstrumentConstructor rootContext, + BacksideConnectionPool backsideConnectionPool, int numThreads, Supplier sslEngineSupplier, IConnectionCaptureFactory connectionCaptureFactory, @@ -39,7 +41,7 @@ public void start(BacksideConnectionPool backsideConnectionPool, try { mainChannel = serverBootstrap.group(bossGroup, workerGroup) .channel(NioServerSocketChannel.class) - .childHandler(new ProxyChannelInitializer<>(backsideConnectionPool, sslEngineSupplier, + .childHandler(new ProxyChannelInitializer<>(rootContext, backsideConnectionPool, sslEngineSupplier, connectionCaptureFactory, requestCapturePredicate)) .childOption(ChannelOption.AUTO_READ, false) .bind(proxyPort).sync().channel(); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 919b5b0c2..4dc9cabed 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -5,8 +5,8 @@ import io.netty.handler.codec.http.HttpMethod; import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.ssl.SslHandler; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import lombok.NonNull; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpHandler; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; @@ -16,16 +16,18 @@ import java.util.function.Supplier; public class ProxyChannelInitializer extends ChannelInitializer { - static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("FrontendConnection"); - private final IConnectionCaptureFactory connectionCaptureFactory; private final Supplier sslEngineProvider; + private final IInstrumentConstructor instrumentationConstructor; private final BacksideConnectionPool backsideConnectionPool; private final RequestCapturePredicate requestCapturePredicate; - public ProxyChannelInitializer(BacksideConnectionPool backsideConnectionPool, Supplier sslEngineSupplier, + public ProxyChannelInitializer(IInstrumentConstructor instrumentationConstructor, + BacksideConnectionPool backsideConnectionPool, + Supplier sslEngineSupplier, IConnectionCaptureFactory connectionCaptureFactory, @NonNull RequestCapturePredicate requestCapturePredicate) { + this.instrumentationConstructor = instrumentationConstructor; this.backsideConnectionPool = 
backsideConnectionPool; this.sslEngineProvider = sslEngineSupplier; this.connectionCaptureFactory = connectionCaptureFactory; @@ -48,8 +50,9 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler("", connectionId, - connectionCaptureFactory, requestCapturePredicate, this::shouldGuaranteeMessageOffloading)); + ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler(instrumentationConstructor, + "", connectionId, connectionCaptureFactory, requestCapturePredicate, + this::shouldGuaranteeMessageOffloading)); ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); } } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java index c405fafe6..131047f13 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java @@ -10,6 +10,7 @@ import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; import org.opensearch.migrations.testutils.SimpleHttpResponse; import org.opensearch.migrations.testutils.SimpleHttpServer; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; @@ -198,8 +199,8 @@ private static String makeTestRequestViaClient(SimpleHttpClientForTesting client try { var connectionPool = new BacksideConnectionPool(testServerUri, null, 10, Duration.ofSeconds(10)); - nshp.get().start(connectionPool, 1, null, connectionCaptureFactory, - new RequestCapturePredicate()); + nshp.get().start(new RootOtelContext(), connectionPool, 1, null, + connectionCaptureFactory, new RequestCapturePredicate()); System.out.println("proxy port = " + port); } catch (InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/TrafficCapture/trafficReplayer/build.gradle b/TrafficCapture/trafficReplayer/build.gradle index c50a37d27..fadad7229 100644 --- a/TrafficCapture/trafficReplayer/build.gradle +++ b/TrafficCapture/trafficReplayer/build.gradle @@ -67,6 +67,7 @@ dependencies { testFixturesImplementation project(':replayerPlugins:jsonMessageTransformers:jsonMessageTransformerInterface') testFixturesImplementation project(':coreUtilities') + testFixturesImplementation testFixtures(project(path: ':coreUtilities')) testFixturesImplementation testFixtures(project(path: ':testUtilities')) testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") @@ -81,6 +82,7 @@ dependencies { testImplementation project(':captureOffloader') testImplementation testFixtures(project(path: ':captureOffloader')) testImplementation testFixtures(project(path: ':testUtilities')) + testImplementation testFixtures(project(path: ':coreUtilities')) testImplementation project(':replayerPlugins:jsonMessageTransformers:jsonJMESPathMessageTransformerProvider') testImplementation 
project(':replayerPlugins:jsonMessageTransformers:jsonJoltMessageTransformerProvider') testImplementation project(':replayerPlugins:jsonMessageTransformers:openSearch23PlusTargetTransformerProvider') diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index b6aef03bf..9340a4987 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -42,10 +42,8 @@ public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStr startingSourceRequestIndex, indexOfCurrentRequest); var httpTransactionContext = new Contexts.HttpTransactionContext( startingAtTrafficStreamKey.getTrafficStreamsContext(), - requestKey, - Accumulation.METERING_CLOSURE.makeSpanContinuation("httpTransaction")); - requestOrResponseAccumulationContext = new Contexts.RequestAccumulationContext(httpTransactionContext, - Accumulation.METERING_CLOSURE.makeSpanContinuation("accumulatingRequest")); + requestKey); + requestOrResponseAccumulationContext = new Contexts.RequestAccumulationContext(httpTransactionContext); } @NonNull ISourceTrafficChannelKey getBeginningTrafficStreamKey() { @@ -77,8 +75,7 @@ public void rotateRequestGatheringToResponse() { var looseCtx = requestOrResponseAccumulationContext; assert looseCtx instanceof IContexts.IRequestAccumulationContext; requestOrResponseAccumulationContext = new Contexts.ResponseAccumulationContext( - getRequestContext().getLogicalEnclosingScope(), - Accumulation.METERING_CLOSURE.makeSpanContinuation("accumulatingResponse")); + getRequestContext().getLogicalEnclosingScope()); } public void addRequestData(Instant packetTimeStamp, byte[] data) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index 6d088e99d..d5a235906 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -46,8 +46,7 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); log.atDebug().setMessage(()->"Scheduling work for "+ctx.getConnectionId()+" at time "+timestamp).log(); - var scheduledContext = new Contexts.ScheduledContext(ctx, - new SimpleMeteringClosure("RSO").makeSpanContinuation("scheduled")); + var scheduledContext = new Contexts.ScheduledContext(ctx); // this method doesn't use the scheduling that scheduleRequest and scheduleClose use because // doing work associated with a connection is considered to be preprocessing work independent // of the underlying network connection itself, so it's fair to be able to do this without @@ -216,10 +215,8 @@ private void scheduleSendOnConnectionReplaySession(IContexts.IReplayerHttpTransa var eventLoop = channelFutureAndRequestSchedule.eventLoop; var packetReceiverRef = new AtomicReference(); Runnable packetSender = () -> { - try (var targetContext = new 
Contexts.TargetRequestContext(ctx, - new SimpleMeteringClosure("RSO").makeSpanContinuation("targetTransaction")); - var requestContext = new Contexts.RequestSendingContext(targetContext, - new SimpleMeteringClosure("RSO").makeSpanContinuation("requestSending"))) { + try (var targetContext = new Contexts.TargetRequestContext(ctx); + var requestContext = new Contexts.RequestSendingContext(targetContext)) { sendNextPartAndContinue(() -> memoizePacketConsumer(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), packetReceiverRef), diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java index a0880e074..ea8a5f928 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java @@ -42,10 +42,9 @@ private TrafficCaptureSourceFactory() {} appParams.kafkaTrafficGroupId, appParams.kafkaTrafficEnableMSKAuth, appParams.kafkaTrafficPropertyFile, Clock.systemUTC(), new KafkaBehavioralPolicy()); - } else if (isInputFileActive) { - return new InputStreamOfTraffic(new FileInputStream(appParams.inputFilename)); } else { - return new InputStreamOfTraffic(System.in); + return new InputStreamOfTraffic(ctx, + isInputFileActive ? new FileInputStream(appParams.inputFilename) : System.in); } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index bf805cdd8..993a9e604 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -8,6 +8,7 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import io.netty.util.concurrent.Future; +import io.opentelemetry.sdk.OpenTelemetrySdk; import lombok.AllArgsConstructor; import lombok.Lombok; import lombok.NonNull; @@ -17,7 +18,7 @@ import org.opensearch.migrations.replay.tracing.Contexts; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; -import org.opensearch.migrations.tracing.EmptyContext; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; @@ -394,11 +395,7 @@ public static void main(String[] args) System.exit(3); return; } - if (params.otelCollectorEndpoint != null) { - SimpleMeteringClosure.initializeOpenTelemetry("replay", params.otelCollectorEndpoint); - } - - var topContext = EmptyContext.singleton; + var topContext = new RootOtelContext(params.otelCollectorEndpoint, "replay"); try (var blockingTrafficSource = TrafficCaptureSourceFactory.createTrafficCaptureSource(topContext, params, Duration.ofSeconds(params.lookaheadTimeSeconds)); var authTransformer = buildAuthTransformerFactory(params)) @@ -659,8 +656,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, // packaging it up and calling the callback. 
// Escalate it up out handling stack and shutdown. if (t == null || t instanceof Exception) { - try (var tupleHandlingContext = new Contexts.TupleHandlingContext(httpContext, - METERING_CLOSURE.makeSpanContinuation("tupleHandling"))) { + try (var tupleHandlingContext = new Contexts.TupleHandlingContext(httpContext)) { packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); } commitTrafficStreams(context, rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); @@ -996,7 +992,7 @@ public void pullCaptureFromSourceToAccumulator( if (stopReadingRef.get()) { break; } - this.nextChunkFutureRef.set(trafficChunkStream.readNextTrafficStreamChunk(null)); + this.nextChunkFutureRef.set(trafficChunkStream.readNextTrafficStreamChunk(topLevelContext)); List trafficStreams = null; try { trafficStreams = this.nextChunkFutureRef.get().get(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 4fb774691..85d424f60 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -70,10 +70,8 @@ public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri } public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IContexts.IReplayerHttpTransactionContext ctx) { - this.parentContext = new Contexts.TargetRequestContext(ctx, - METERING_CLOSURE.makeSpanContinuation("targetTransaction")); - this.currentRequestContext = new Contexts.RequestSendingContext(this.parentContext, - METERING_CLOSURE.makeSpanContinuation("sendingRequest")); + this.parentContext = new Contexts.TargetRequestContext(ctx); + this.currentRequestContext = new Contexts.RequestSendingContext(this.parentContext); responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), @@ -164,8 +162,7 @@ private void activateChannelForThisConsumer() { addLoggingHandler(pipeline, "B"); pipeline.addLast(new BacksideSnifferHandler(responseBuilder, ()->{ this.currentRequestContext.close(); - this.currentRequestContext = new Contexts.ReceivingHttpResponseContext(this.parentContext, - METERING_CLOSURE.makeSpanContinuation("receivingRequest")); + this.currentRequestContext = new Contexts.ReceivingHttpResponseContext(this.parentContext); })); addLoggingHandler(pipeline, "C"); @@ -280,8 +277,7 @@ private IContexts.IReplayerHttpTransactionContext httpContext() { finalizeRequest() { var ff = activeChannelFuture.getDeferredFutureThroughHandle((v,t)-> { this.currentRequestContext.close(); - this.currentRequestContext = new Contexts.WaitingForHttpResponseContext(parentContext, - new SimpleMeteringClosure("RSO").makeSpanContinuation("waitingForResponse")); + this.currentRequestContext = new Contexts.WaitingForHttpResponseContext(parentContext); var future = new CompletableFuture(); var rval = new DiagnosticTrackableCompletableFuture(future, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index c27fb9552..cf369112e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -73,8 +73,7 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, IAuthTransformerFactory authTransformerFactory, IPacketFinalizingConsumer transformedPacketReceiver, IContexts.IReplayerHttpTransactionContext httpTransactionContext) { - transformationContext = new Contexts.RequestTransformationContext(httpTransactionContext, - METERING_CLOSURE.makeSpanContinuation("transformation")); + transformationContext = new Contexts.RequestTransformationContext(httpTransactionContext); chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index ff6dd2201..0f7959ed9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -19,7 +19,6 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -97,7 +96,7 @@ public KafkaTrafficCaptureSource(@NonNull IInstrumentationAttributes globalConte Clock clock, @NonNull KafkaBehavioralPolicy behavioralPolicy) { - this.channelContextManager = new ChannelContextManager(); + this.channelContextManager = new ChannelContextManager(globalContext); trackingKafkaConsumer = new TrackingKafkaConsumer(globalContext, kafkaConsumer, topic, keepAliveInterval, clock, this::onKeyFinishedCommitting); trafficStreamsRead = new AtomicLong(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 2472ebeb0..bbe4353e6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -11,12 +11,9 @@ import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; import 
org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.ISpanGenerator; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.slf4j.event.Level; @@ -45,8 +42,9 @@ */ @Slf4j public class TrackingKafkaConsumer implements ConsumerRebalanceListener { + public static final String TELEMETRY_SCOPE_NAME = "TrackingKafkaConsumer"; private static final SimpleMeteringClosure METERING_CLOSURE = - new SimpleMeteringClosure("TrackingKafkaConsumer"); + new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); @AllArgsConstructor private static class OrderedKeyHolder implements Comparable { @@ -76,34 +74,30 @@ public int hashCode() { } public static class TouchScopeContext extends DirectNestedSpanContext { - public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope, - @NonNull ISpanWithParentGenerator spanGenerator) { + public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "touch"); } } public static class PollScopeContext extends DirectNestedSpanContext { - public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope, - @NonNull ISpanWithParentGenerator spanGenerator) { + public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "kafkaPoll"); } } public static class CommitScopeContext extends DirectNestedSpanContext { - public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope, - @NonNull ISpanWithParentGenerator spanGenerator) { + public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "commit"); } } public static class KafkaCommitScopeContext extends DirectNestedSpanContext { - public KafkaCommitScopeContext(@NonNull CommitScopeContext enclosingScope, - @NonNull ISpanWithParentGenerator spanGenerator) { + public KafkaCommitScopeContext(@NonNull CommitScopeContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "kafkaCommit"); } } @@ -204,12 +198,10 @@ public Optional getNextRequiredTouch() { } public void touch(IInstrumentationAttributes context) { - try (var touchCtx = new TouchScopeContext(context, - METERING_CLOSURE.makeSpanContinuation("touch"))) { + try (var touchCtx = new TouchScopeContext(context)) { log.trace("touch() called."); pause(); - try (var pollCtx = new PollScopeContext(touchCtx, - METERING_CLOSURE.makeSpanContinuation("kafkaPoll"))) { + try (var pollCtx = new PollScopeContext(touchCtx)) { var records = kafkaConsumer.poll(Duration.ZERO); if (!records.isEmpty()) { throw new IllegalStateException("Expected no entries once the consumer was paused. " + @@ -295,8 +287,7 @@ private Stream applyBuilder(BiFunction records; - try (var pollContext = new PollScopeContext(context, - METERING_CLOSURE.makeSpanContinuation("kafkaPoll"))) { + try (var pollContext = new PollScopeContext(context)) { records = kafkaConsumer.poll(keepAliveInterval.dividedBy(POLL_TIMEOUT_KEEP_ALIVE_DIVISOR)); } log.atLevel(records.isEmpty()? 
Level.TRACE:Level.INFO) @@ -363,8 +354,7 @@ private void safeCommit(IInstrumentationAttributes incomingContext) { if (nextSetOfCommitsMap.isEmpty()) { return; } - context = new CommitScopeContext(incomingContext, - METERING_CLOSURE.makeSpanContinuation("commit")); + context = new CommitScopeContext(incomingContext); nextCommitsMapCopy = new HashMap<>(); nextCommitsMapCopy.putAll(nextSetOfCommitsMap); } @@ -408,8 +398,7 @@ private static void safeCommitStatic(CommitScopeContext context, Consumer nextCommitsMap) { assert !nextCommitsMap.isEmpty(); log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); - try (var kafkaContext = new KafkaCommitScopeContext(context, - METERING_CLOSURE.makeSpanContinuation("kafkaCommit"));) { + try (var kafkaContext = new KafkaCommitScopeContext(context)) { kafkaConsumer.commitSync(nextCommitsMap); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java index d7f0d2da3..9671315fe 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java @@ -15,9 +15,6 @@ @EqualsAndHashCode(callSuper = true) @Getter class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext implements KafkaCommitOffsetData { - public static final String TELEMETRY_SCOPE_NAME = "KafkaRecords"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - private final int generation; private final int partition; private final long offset; @@ -35,10 +32,8 @@ class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext i this.partition = partition; this.offset = offset; var channelKeyContext = contextFactory.apply(this); - var kafkaContext = new Contexts.KafkaRecordContext(channelKeyContext, recordId, - METERING_CLOSURE.makeSpanContinuation("kafkaRecord")); - this.setTrafficStreamsContext(new Contexts.TrafficStreamsLifecycleContext(kafkaContext, this, - METERING_CLOSURE.makeSpanContinuation("trafficStreamLifecycle"))); + var kafkaContext = new Contexts.KafkaRecordContext(channelKeyContext, recordId); + this.setTrafficStreamsContext(new Contexts.TrafficStreamsLifecycleContext(kafkaContext, this)); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index 897263e39..b49754278 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -2,6 +2,8 @@ import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.ISpanGenerator; import org.opensearch.migrations.tracing.SimpleMeteringClosure; @@ -11,6 +13,11 @@ public class ChannelContextManager implements Function { public static final String TELEMETRY_SCOPE_NAME 
= "Channel"; public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); + private final IInstrumentationAttributes globalContext; + + public ChannelContextManager(IInstrumentationAttributes globalContext) { + this.globalContext = globalContext; + } private static class RefCountedContext { @Getter final ChannelKeyContext context; @@ -44,12 +51,8 @@ public ChannelKeyContext apply(ITrafficStreamKey tsk) { } public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { - return retainOrCreateContext(tsk, METERING_CLOSURE.makeSpanContinuation("channel", null)); - } - - public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk, ISpanGenerator spanGenerator) { return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), - k-> new RefCountedContext(new ChannelKeyContext(tsk, spanGenerator))).retain(); + k-> new RefCountedContext(new ChannelKeyContext(globalContext, tsk))).retain(); } public ChannelKeyContext releaseContextFor(ChannelKeyContext ctx) { @@ -58,7 +61,7 @@ public ChannelKeyContext releaseContextFor(ChannelKeyContext ctx) { assert ctx == refCountedCtx.context; var finalRelease = refCountedCtx.release(); if (finalRelease) { - ctx.currentSpan.end(); + ctx.close(); connectionToChannelContextMap.remove(connId); } return ctx; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java index 20a251fec..1c7e72c74 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java @@ -1,25 +1,20 @@ package org.opensearch.migrations.replay.tracing; -import io.opentelemetry.api.trace.Span; import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.tracing.ISpanGenerator; +import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithStartTime; -import java.time.Instant; - -public class ChannelKeyContext implements IChannelKeyContext, IWithStartTime { +public class ChannelKeyContext extends AbstractNestedSpanContext + implements IChannelKeyContext, IWithStartTime { @Getter final ISourceTrafficChannelKey channelKey; - @Getter - final Span currentSpan; - @Getter - final Instant startTime; - public ChannelKeyContext(ISourceTrafficChannelKey channelKey, ISpanGenerator spanGenerator) { + public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { + super(enclosingScope); this.channelKey = channelKey; - this.currentSpan = spanGenerator.apply(getPopulatedAttributes()); - this.startTime = Instant.now(); + setCurrentSpan("Connection", "channel"); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java index 2fff0f2f5..6fbdb38c8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java @@ -2,8 +2,8 @@ import 
org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IndirectNestedSpanContext; public class Contexts { @@ -13,11 +13,10 @@ public static class KafkaRecordContext extends DirectNestedSpanContext implements IContexts.IRequestAccumulationContext { - public RequestAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public RequestAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("Accumulator", "accumulatingRequest"); } } public static class ResponseAccumulationContext extends DirectNestedSpanContext implements IContexts.IResponseAccumulationContext { - public ResponseAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public ResponseAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("Accumulator", "accumulatingResponse"); } } public static class RequestTransformationContext extends DirectNestedSpanContext implements IContexts.IRequestTransformationContext { - public RequestTransformationContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public RequestTransformationContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("HttpTransformer", "transformation"); } } public static class ScheduledContext extends DirectNestedSpanContext implements IContexts.IScheduledContext { - public ScheduledContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public ScheduledContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("RequestSender", "scheduled"); } } public static class TargetRequestContext extends DirectNestedSpanContext implements IContexts.ITargetRequestContext { - public TargetRequestContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public TargetRequestContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("RequestSender", "targetTransaction"); } } public static class RequestSendingContext extends DirectNestedSpanContext implements IContexts.IRequestSendingContext { - public RequestSendingContext(IContexts.ITargetRequestContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public RequestSendingContext(IContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("RequestSender","requestSending"); } } public static class WaitingForHttpResponseContext extends DirectNestedSpanContext implements IContexts.IWaitingForHttpResponseContext { - public WaitingForHttpResponseContext(IContexts.ITargetRequestContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public 
WaitingForHttpResponseContext(IContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("RequestSender", "waitingForResponse"); } } public static class ReceivingHttpResponseContext extends DirectNestedSpanContext implements IContexts.IReceivingHttpResponseContext { - public ReceivingHttpResponseContext(IContexts.ITargetRequestContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public ReceivingHttpResponseContext(IContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("HttpSender", "receivingRequest"); } } public static class TupleHandlingContext extends DirectNestedSpanContext implements IContexts.ITupleHandlingContext { - public TupleHandlingContext(IContexts.IReplayerHttpTransactionContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public TupleHandlingContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan("TrafficReplayer", "tupleHandling"); } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java deleted file mode 100644 index 513242c45..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IndirectNestedSpanContext.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import lombok.NonNull; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; - -public abstract class IndirectNestedSpanContext - extends AbstractNestedSpanContext { - public IndirectNestedSpanContext(@NonNull D enclosingScope) { - super(enclosingScope); - } - - public abstract L getLogicalEnclosingScope(); -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 2aac42f2b..0b8213383 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -1,18 +1,12 @@ package org.opensearch.migrations.replay.traffic.source; import com.google.protobuf.Timestamp; -import io.opentelemetry.api.trace.Span; -import lombok.Getter; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.ISpanGenerator; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.slf4j.event.Level; @@ -42,8 +36,7 @@ */ @Slf4j public class 
BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlowController { - private static final SimpleMeteringClosure METERING_CLOSURE = - new SimpleMeteringClosure("BlockingTrafficSource"); + private static final String TELEMETRY_SCOPE_NAME = "BlockingTrafficSource"; private final ISimpleTrafficCaptureSource underlyingSource; private final AtomicReference lastTimestampSecondsRef; @@ -54,27 +47,25 @@ public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlo private final Semaphore readGate; private final Duration bufferTimeWindow; - public static class ReadChunkContext extends DirectNestedSpanContext { - public ReadChunkContext(IInstrumentationAttributes enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public static class ReadChunkContext + extends DirectNestedSpanContext { + public ReadChunkContext(T enclosingScope) { super(enclosingScope); - setCurrentSpanWithNoParent(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "readNextTrafficStreamChunk"); } } public static class BackPressureBlockContext extends DirectNestedSpanContext { - public BackPressureBlockContext(@NonNull ReadChunkContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public BackPressureBlockContext(@NonNull ReadChunkContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "backPressureBlock"); } } public static class WaitForNextSignal extends DirectNestedSpanContext { - public WaitForNextSignal(@NonNull BackPressureBlockContext enclosingScope, - ISpanWithParentGenerator spanGenerator) { + public WaitForNextSignal(@NonNull BackPressureBlockContext enclosingScope) { super(enclosingScope); - setCurrentSpan(spanGenerator); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "waitForNextBackPressureCheck"); } } @@ -114,10 +105,6 @@ public Duration getBufferTimeWindow() { return bufferTimeWindow; } - public CompletableFuture> readNextTrafficStreamChunk() { - return readNextTrafficStreamChunk(null); - } - /** * Reads the next chunk that is available before the current stopReading barrier. However, * that barrier isn't meant to be a tight barrier with immediate effect. 
@@ -127,8 +114,7 @@ public CompletableFuture> readNextTrafficStreamChunk @Override public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { - var readContext = new ReadChunkContext(context, - METERING_CLOSURE.makeSpanContinuation("readNextTrafficStreamChunk")); + var readContext = new ReadChunkContext(context); log.info("BlockingTrafficSource::readNext"); var trafficStreamListFuture = CompletableFuture .supplyAsync(() -> blockIfNeeded(readContext), task -> new Thread(task).start()) @@ -161,8 +147,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { BackPressureBlockContext blockContext = null; while (stopReadingAtRef.get().isBefore(lastTimestampSecondsRef.get())) { if (blockContext == null) { - blockContext = new BackPressureBlockContext(readContext, - METERING_CLOSURE.makeSpanContinuation("backPressureBlock")); + blockContext = new BackPressureBlockContext(readContext); } try { log.atInfo().setMessage("blocking until signaled to read the next chunk last={} stop={}") @@ -172,8 +157,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { var nextTouchOp = underlyingSource.getNextRequiredTouch(); if (nextTouchOp.isEmpty()) { log.trace("acquiring readGate semaphore (w/out timeout)"); - try (var waitContext = new WaitForNextSignal(blockContext, - METERING_CLOSURE.makeSpanContinuation("waitForNextBackPressureCheck"))) { + try (var waitContext = new WaitForNextSignal(blockContext)) { readGate.acquire(); } } else { @@ -188,8 +172,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { // if this doesn't succeed, we'll loop around & likely do a touch, then loop around again. // if it DOES succeed, we'll loop around and make sure that there's not another reason to stop log.atTrace().setMessage(() -> "acquring readGate semaphore with timeout=" + waitIntervalMs).log(); - try (var waitContext = new WaitForNextSignal(blockContext, - METERING_CLOSURE.makeSpanContinuation("waitForNextBackPressureCheck"))) { + try (var waitContext = new WaitForNextSignal(blockContext)) { readGate.tryAcquire(waitIntervalMs, TimeUnit.MILLISECONDS); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index bf3cb0c2b..70a1a61b7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -8,12 +8,10 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; -import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.EOFException; @@ -25,13 +23,13 @@ @Slf4j public class InputStreamOfTraffic implements 
ISimpleTrafficCaptureSource { - private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("InputStreamOfTraffic"); + private static final String TELEMETRY_SCOPE_NAME = "InputStreamOfTraffic"; private final InputStream inputStream; private final AtomicInteger trafficStreamsRead = new AtomicInteger(); private final ChannelContextManager channelContextManager; - public InputStreamOfTraffic(InputStream inputStream) { - this.channelContextManager = new ChannelContextManager(); + public InputStreamOfTraffic(IInstrumentationAttributes context, InputStream inputStream) { + this.channelContextManager = new ChannelContextManager(context); this.inputStream = inputStream; } @@ -43,7 +41,7 @@ private static class IOSTrafficStreamContext public IOSTrafficStreamContext(@NonNull IChannelKeyContext ctx, ITrafficStreamKey tsk) { super(ctx); this.trafficStreamKey = tsk; - setCurrentSpan(METERING_CLOSURE.makeSpanContinuation("trafficStreamLifecycle")); + setCurrentSpan(TELEMETRY_SCOPE_NAME, "trafficStreamLifecycle"); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 7273bc296..1f23a4bc1 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -13,6 +13,7 @@ import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -45,7 +46,7 @@ void readNextChunkTest() throws Exception { blockingSource.stopReadsPast(sourceStartTime.plus(Duration.ofMillis(0))); var firstChunk = new ArrayList(); for (int i = 0; i<=BUFFER_MILLIS+SHIFT; ++i) { - var nextPieceFuture = blockingSource.readNextTrafficStreamChunk(); + var nextPieceFuture = blockingSource.readNextTrafficStreamChunk(TestContext.singleton); nextPieceFuture.get(500000, TimeUnit.MILLISECONDS) .forEach(ts->firstChunk.add(ts)); } @@ -53,7 +54,7 @@ void readNextChunkTest() throws Exception { Assertions.assertTrue(BUFFER_MILLIS+SHIFT <= firstChunk.size()); Instant lastTime = null; for (int i =SHIFT; i{ @@ -193,7 +194,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); try (var bais = new ByteArrayInputStream(bytes)) { - try (var trafficSource = new InputStreamOfTraffic(bais)) { + try (var trafficSource = new InputStreamOfTraffic(TestContext.singleton, bais)) { tr.pullCaptureFromSourceToAccumulator(trafficSource, trafficAccumulator); } } @@ -262,7 +263,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel } try (var bais = new ByteArrayInputStream(serializedChunks)) { - try (var trafficSource = new InputStreamOfTraffic(bais)) { + try (var trafficSource = new InputStreamOfTraffic(TestContext.singleton, bais)) { tr.pullCaptureFromSourceToAccumulator(trafficSource, trafficAccumulator); } } diff --git 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java index ffbf685db..587bc0ec2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java @@ -2,6 +2,7 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.IOException; @@ -12,8 +13,8 @@ public class V0_1TrafficCaptureSource extends CompressedFileTrafficCaptureSource protected final HashMap connectionProgressMap; - public V0_1TrafficCaptureSource(String filename) throws IOException { - super(filename); + public V0_1TrafficCaptureSource(IInstrumentationAttributes context, String filename) throws IOException { + super(context, filename); connectionProgressMap = new HashMap<>(); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index 8b2133b7a..b547c592a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -18,7 +18,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.TestContext; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.testcontainers.containers.KafkaContainer; @@ -113,7 +113,7 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { for (int i=0; i{ var tsk = ts.getKey(); log.atInfo().setMessage(()->"checking for "+tsk).log(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index 209bb91af..feb28d7cf 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -5,7 +5,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.TestContext; +import org.opensearch.migrations.tracing.TestContext; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index c14b3085a..108202cfa 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -10,9 +10,10 @@ import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.TestContext; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -44,7 +45,7 @@ public void testRecordToString() { .setNodeId("n") .setNumber(7) .build(); - var contextFactory = new ChannelContextManager(); + var contextFactory = new ChannelContextManager(TestContext.singleton); var tsk = new TrafficStreamKeyWithKafkaRecordId(contextFactory, ts, "testRecord", 1, 2, 123); Assertions.assertEquals("n.c.7|partition=2|offset=123", tsk.toString()); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java deleted file mode 100644 index 8a917406c..000000000 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestContext.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.opensearch.migrations.replay; - -import io.opentelemetry.api.trace.Span; -import lombok.Getter; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; - -public class TestContext implements IScopedInstrumentationAttributes { - public static final TestContext singleton = new TestContext(); - - @Override - public IInstrumentationAttributes getEnclosingScope() { - return null; - } - - @Getter - public Span currentSpan; - public TestContext() { - currentSpan = new SimpleMeteringClosure("test").makeSpanContinuation("testSpan") - .apply(getPopulatedAttributes(), null); - } -} diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 4c0df1bed..797fb077b 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -23,7 +23,6 @@ public static Contexts.HttpTransactionContext getTestConnectionRequestContext(St PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, tsk -> new TestTrafficStreamsLifecycleContext(tsk)), 0, replayerIdx); - return new Contexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), - rk, METERING_CLOSURE.makeSpanContinuation("test2")); + return new 
Contexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index ddea5a489..119060b52 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -1,11 +1,11 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IChannelKeyContext; import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.SimpleMeteringClosure; class TestTrafficStreamsLifecycleContext @@ -16,9 +16,9 @@ class TestTrafficStreamsLifecycleContext private final ITrafficStreamKey trafficStreamKey; public TestTrafficStreamsLifecycleContext(ITrafficStreamKey tsk) { - super(new ChannelKeyContext(tsk, METERING_CLOSURE.makeSpanContinuation("channel", null))); + super(new ChannelKeyContext(new RootOtelContext(), tsk)); this.trafficStreamKey = tsk; - setCurrentSpan(METERING_CLOSURE.makeSpanContinuation("stream")); + setCurrentSpan("testScope","testSpan"); } @Override

From 0e5fe09fc41703a6d14bb44e327f70063756cb9e Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Fri, 22 Dec 2023 15:08:17 -0500
Subject: [PATCH 36/94] Refactoring: clarify which classes emit metrics, give
 all scopes names, and group more contexts together in files.

1) RootOtelContext creates all SimpleMeteringClosures, and IInstrumentationAttributes has default methods to wire together all metric emissions (making it a lot more convenient).
2) All IInstrumentationAttributes have a scopeName now so that it's consistent and can be used to make metrics and spans.
3) All replayer contexts were split into interfaces and implementations (bridge or PIMPL pattern). I've moved them around so that they now all reside in the tracing directory in the same type of structure.
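To make the shape of that refactoring concrete, the following is a minimal, self-contained Java sketch of the pattern. The names echo IInstrumentationAttributes, DirectNestedSpanContext, RootOtelContext, and setCurrentSpan from this patch, but everything below (MeterSink, NestedSpanContext, PollScopeContext, the string-based "span") is a simplified stand-in for illustration, not the project's actual API.

import java.util.concurrent.atomic.AtomicLong;

public class TracingRefactorSketch {

    // Stand-in for the meter/span plumbing that the root context would own.
    interface MeterSink {
        void add(String scopeName, String metricName, long delta);
    }

    // Analogue of IInstrumentationAttributes: every context knows its scope name, and the
    // default methods let call sites write ctx.meterIncrementEvent("stream_created")
    // directly instead of routing through a shared METERING_CLOSURE.
    interface InstrumentationAttributes {
        String getScopeName();
        MeterSink getMeterSink();

        default void meterIncrementEvent(String metricName) {
            meterDeltaEvent(metricName, 1);
        }

        default void meterDeltaEvent(String metricName, long delta) {
            getMeterSink().add(getScopeName(), metricName, delta);
        }
    }

    // Analogue of DirectNestedSpanContext: a nested context builds its span from its own
    // scope name, so subclasses only pass the span name to setCurrentSpan(...).
    static abstract class NestedSpanContext implements InstrumentationAttributes, AutoCloseable {
        private final InstrumentationAttributes enclosingScope;
        private String currentSpan;

        protected NestedSpanContext(InstrumentationAttributes enclosingScope) {
            this.enclosingScope = enclosingScope;
        }

        protected void setCurrentSpan(String spanName) {
            currentSpan = getScopeName() + "/" + spanName;
        }

        public String getCurrentSpan() {
            return currentSpan;
        }

        @Override
        public MeterSink getMeterSink() {
            return enclosingScope.getMeterSink();
        }

        @Override
        public void close() {
            // a real implementation would end the span here
        }
    }

    // Analogue of one per-operation context (compare PollScopeContext in TrackingKafkaConsumer).
    static class PollScopeContext extends NestedSpanContext {
        PollScopeContext(InstrumentationAttributes enclosingScope) {
            super(enclosingScope);
            setCurrentSpan("kafkaPoll");
            meterIncrementEvent("poll_started");
        }

        @Override
        public String getScopeName() {
            return "TrackingKafkaConsumer";
        }
    }

    public static void main(String[] args) {
        var eventCount = new AtomicLong();
        // Analogue of RootOtelContext: the root supplies the metric plumbing for all children.
        InstrumentationAttributes root = new InstrumentationAttributes() {
            @Override
            public String getScopeName() {
                return "root";
            }

            @Override
            public MeterSink getMeterSink() {
                return (scope, name, delta) -> eventCount.addAndGet(delta);
            }
        };
        try (var poll = new PollScopeContext(root)) {
            System.out.println(poll.getCurrentSpan() + " metered events=" + eventCount.get());
        }
    }
}

With this shape, the interface/implementation split called out in point 3 keeps the interfaces (IReplayContexts, IKafkaConsumerContexts, ITrafficSourceContexts) separate from the concrete context classes (ReplayContexts, KafkaConsumerContexts, TrafficSourceContexts), so call sites depend only on the interfaces while the span and metric wiring stays in one place.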
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 25 +-- .../tracing/KafkaRecordContext.java | 6 +- .../tracing/AbstractNestedSpanContext.java | 4 +- .../tracing/DirectNestedSpanContext.java | 2 +- .../tracing/IInstrumentConstructor.java | 2 + .../tracing/IInstrumentationAttributes.java | 28 +++ .../migrations/tracing/RootOtelContext.java | 81 +++++++- .../tracing/SimpleMeteringClosure.java | 69 +++---- .../commoncontexts/IConnectionContext.java | 2 + .../migrations/tracing/TestContext.java | 5 + ...nditionallyReliableLoggingHttpHandler.java | 9 +- .../netty/LoggingHttpHandler.java | 24 +-- .../netty/tracing/HttpMessageContext.java | 4 +- .../migrations/replay/Accumulation.java | 2 - .../replay/AccumulationCallbacks.java | 14 +- ...edTrafficToHttpTransactionAccumulator.java | 6 +- .../replay/ClientConnectionPool.java | 12 +- .../replay/PacketConsumerFactory.java | 5 +- ...acketToTransformingHttpHandlerFactory.java | 5 +- .../migrations/replay/ReplayEngine.java | 10 +- .../replay/RequestResponsePacketPair.java | 28 +-- .../replay/RequestSenderOrchestrator.java | 30 ++- .../migrations/replay/TrafficReplayer.java | 24 +-- .../NettyPacketToHttpConsumer.java | 28 ++- .../http/HttpJsonTransformingConsumer.java | 19 +- ...dHttpRequestPreliminaryConvertHandler.java | 7 +- ...ttySendByteBufsToPacketHandlerHandler.java | 7 +- .../http/RequestPipelineOrchestrator.java | 6 +- .../datatypes/ConnectionReplaySession.java | 4 +- .../datatypes/ISourceTrafficChannelKey.java | 5 +- .../replay/datatypes/ITrafficStreamKey.java | 3 - .../PojoTrafficStreamKeyAndContext.java | 9 +- .../kafka/KafkaTrafficCaptureSource.java | 16 +- .../replay/kafka/TrackingKafkaConsumer.java | 32 ++- .../TrafficStreamKeyWithKafkaRecordId.java | 13 +- .../replay/tracing/ChannelContextManager.java | 21 +- .../replay/tracing/ChannelKeyContext.java | 25 --- .../migrations/replay/tracing/Contexts.java | 168 --------------- .../replay/tracing/IChannelKeyContext.java | 18 -- .../tracing/IKafkaConsumerContexts.java | 16 ++ .../{IContexts.java => IReplayContexts.java} | 65 +++++- .../tracing/ITrafficSourceContexts.java | 16 ++ .../replay/tracing/KafkaConsumerContexts.java | 42 ++++ .../replay/tracing/ReplayContexts.java | 192 ++++++++++++++++++ .../replay/tracing/TrafficSourceContexts.java | 38 ++++ .../traffic/source/BlockingTrafficSource.java | 39 +--- .../traffic/source/InputStreamOfTraffic.java | 14 +- .../replay/FullTrafficReplayerTest.java | 5 +- ...afficToHttpTransactionAccumulatorTest.java | 14 +- .../replay/TrafficReplayerTest.java | 23 +-- .../migrations/replay/TestRequestKey.java | 10 +- .../TestTrafficStreamsLifecycleContext.java | 20 +- 52 files changed, 715 insertions(+), 557 deletions(-) delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java delete mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java rename TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/{IContexts.java => IReplayContexts.java} (58%) create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java create mode 100644 
TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 3e145c248..4bbad1e9d 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -32,9 +32,6 @@ @Slf4j public class KafkaCaptureFactory implements IConnectionCaptureFactory { - private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("KafkaCapture"); - - private static final MetricsLogger metricsLogger = new MetricsLogger("BacksideHandler"); private static final String DEFAULT_TOPIC_NAME_FOR_TRAFFIC = "logging-traffic-topic"; @@ -84,8 +81,8 @@ class StreamManager extends OrderedStreamLifecyleManager { public StreamManager(IConnectionContext ctx, String connectionId) { this.telemetryContext = ctx; - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_created"); - METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", 1); + ctx.meterIncrementEvent("offloader_created"); + telemetryContext.meterDeltaEvent("offloaders_active", 1); this.connectionId = connectionId; this.startTime = Instant.now(); @@ -94,15 +91,15 @@ public StreamManager(IConnectionContext ctx, String connectionId) { @Override public void close() throws IOException { log.atInfo().setMessage(() -> "factory.close()").log(); - METERING_CLOSURE.meterHistogramMillis(telemetryContext, "offloader_stream_lifetime", + telemetryContext.meterHistogramMillis("offloader_stream_lifetime", Duration.between(startTime, Instant.now())); - METERING_CLOSURE.meterDeltaEvent(telemetryContext, "offloaders_active", -1); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "offloader_closed"); + telemetryContext.meterDeltaEvent("offloaders_active", -1); + telemetryContext.meterIncrementEvent("offloader_closed"); } @Override public CodedOutputStreamWrapper createStream() { - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_created"); + telemetryContext.meterIncrementEvent("stream_created"); ByteBuffer bb = ByteBuffer.allocate(bufferSize); return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb); @@ -129,7 +126,7 @@ public CodedOutputStreamWrapper createStream() { var flushContext = new KafkaRecordContext(telemetryContext, topicNameForTraffic, recordId, kafkaRecord.value().length); - METERING_CLOSURE.meterIncrementEvent(telemetryContext, "stream_flush_called"); + telemetryContext.meterIncrementEvent("stream_flush_called"); // Async request to Kafka cluster producer.send(kafkaRecord, handleProducerRecordSent(cf, recordId, flushContext)); @@ -165,11 +162,9 @@ private Callback handleProducerRecordSent(CompletableFuture cf, // that field out of scope. 
return (metadata, exception) -> { log.atInfo().setMessage(()->"kafka completed sending a record").log(); - METERING_CLOSURE.meterHistogramMicros(flushContext, - exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms"); - METERING_CLOSURE.meterIncrementEvent(flushContext, - exception==null ? "stream_flush_success" : "stream_flush_failure"); - METERING_CLOSURE.meterIncrementEvent(flushContext, + flushContext.meterHistogramMicros(exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms"); + flushContext.meterIncrementEvent(exception==null ? "stream_flush_success" : "stream_flush_failure"); + flushContext.meterIncrementEvent( exception==null ? "stream_flush_success_bytes" : "stream_flush_failure_bytes", flushContext.getRecordSize()); flushContext.close(); diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index bc2742e8d..95134f4a9 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -12,8 +12,6 @@ import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithStartTime; -import java.time.Instant; - public class KafkaRecordContext extends DirectNestedSpanContext implements IScopedInstrumentationAttributes, IWithStartTime { static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); @@ -32,9 +30,11 @@ public KafkaRecordContext(IConnectionContext enclosingScope, String topic, Strin this.topic = topic; this.recordId = recordId; this.recordSize = recordSize; - setCurrentSpan("KafkaCapture", "stream_flush_called"); + setCurrentSpan("stream_flush_called"); } + @Override public String getScopeName() { return "KafkaCapture"; } + @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder.put(TOPIC_ATTR, getTopic()) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java index 4901d0e3e..241c614ca 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java @@ -26,8 +26,8 @@ public IInstrumentationAttributes getEnclosingScope() { public T getImmediateEnclosingScope() { return enclosingScope; } - protected void setCurrentSpan(String scopeName, String spanName) { - setCurrentSpan(rootInstrumentationScope.buildSpan(enclosingScope, scopeName, spanName)); + protected void setCurrentSpan(String spanName) { + setCurrentSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), spanName)); } protected void setCurrentSpanWithNoParent(@NonNull ISpanWithParentGenerator spanGenerator) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java index 38611d841..1894b1b9c 100644 --- 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java @@ -1,6 +1,6 @@ package org.opensearch.migrations.tracing; -public class DirectNestedSpanContext +public abstract class DirectNestedSpanContext extends AbstractNestedSpanContext implements IWithTypedEnclosingScope { public DirectNestedSpanContext(T enclosingScope) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index 32a7505cb..d90d9e52e 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -1,8 +1,10 @@ package org.opensearch.migrations.tracing; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Span; public interface IInstrumentConstructor { Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName); Span buildSpanWithoutParent(String scopeName, String spanName); + SimpleMeteringClosure buildMeter(IInstrumentationAttributes context); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index c61e40141..641f25942 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -5,9 +5,11 @@ import io.opentelemetry.api.trace.Span; import lombok.NonNull; +import java.time.Duration; import java.util.ArrayList; public interface IInstrumentationAttributes { + String getScopeName(); IInstrumentationAttributes getEnclosingScope(); @NonNull IInstrumentConstructor getRootInstrumentationScope(); default Span getCurrentSpan() { return null; } @@ -34,4 +36,30 @@ default AttributesBuilder getPopulatedAttributesBuilder() { } return builder; } + + default void meterIncrementEvent(String eventName) { + getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName); + } + default void meterIncrementEvent(String eventName, long increment) { + getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName, increment); + } + default void meterDeltaEvent(String eventName, long delta) { + getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName, delta); + } + default void meterHistogramMicros(String eventName, Duration value) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, value); + } + default void meterHistogramMillis(String eventName, Duration value) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName, value); + } + default void meterHistogram(String eventName, String units, long value) { + getRootInstrumentationScope().buildMeter(this).meterHistogram(eventName, units, value); + } + default void meterHistogramMicros(String eventName) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName); + } + default void meterHistogramMillis(String eventName) { + 
getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName); + } + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index e2ad60eb7..f7cfa5f88 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -3,6 +3,7 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.context.Context; @@ -18,21 +19,22 @@ import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; -import lombok.Getter; +import lombok.NonNull; import java.time.Duration; +import java.time.Instant; import java.util.Optional; import java.util.concurrent.TimeUnit; public class RootOtelContext implements IInstrumentationAttributes, IInstrumentConstructor { - @Getter private final OpenTelemetry openTelemetrySdk; + private final OpenTelemetry openTelemetryImpl; public static OpenTelemetry initializeOpenTelemetry(String serviceName, String collectorEndpoint) { var serviceResource = Resource.getDefault().toBuilder() .put(ResourceAttributes.SERVICE_NAME, serviceName) .build(); - OpenTelemetrySdk openTelemetrySdk = + var openTelemetrySdk = OpenTelemetrySdk.builder() .setLoggerProvider( SdkLoggerProvider.builder() @@ -85,7 +87,12 @@ public RootOtelContext(String collectorEndpoint, String serviceName) { } public RootOtelContext(OpenTelemetry sdk) { - openTelemetrySdk = sdk; + openTelemetryImpl = sdk; + } + + @Override + public String getScopeName() { + return "Root"; } @Override @@ -94,6 +101,7 @@ public IInstrumentationAttributes getEnclosingScope() { } @Override + @NonNull public IInstrumentConstructor getRootInstrumentationScope() { return this; } @@ -112,12 +120,73 @@ public static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Sp @Override public Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName) { var parentSpan = enclosingScope.getCurrentSpan(); - var spanBuilder = getOpenTelemetrySdk().getTracer(scopeName).spanBuilder(spanName); + var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); return buildSpanWithParent(spanBuilder, getPopulatedAttributes(), parentSpan); } public Span buildSpanWithoutParent(String scopeName, String spanName) { - var spanBuilder = getOpenTelemetrySdk().getTracer(scopeName).spanBuilder(spanName); + var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); return buildSpanWithParent(spanBuilder, getPopulatedAttributes(), null); } + + public SimpleMeteringClosure buildMeter(IInstrumentationAttributes ctx) { + return new SimpleMeteringClosure(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); + } + + OpenTelemetry getOpenTelemetry() { + return openTelemetryImpl; + } + + public void meterIncrementEvent(Meter meter, IInstrumentationAttributes ctx, String eventName) { + meterIncrementEvent(meter, ctx, eventName, 1); + } + + public void meterIncrementEvent(Meter meter, IInstrumentationAttributes ctx, 
String eventName, long increment) { + meter.counterBuilder(eventName) + .build().add(increment, ctx.getPopulatedAttributesBuilder() + .put("labelName", eventName) + .build()); + } + + public void meterDeltaEvent(Meter meter, IInstrumentationAttributes ctx, String eventName, long delta) { + if (ctx == null) { + return; + } + meter.upDownCounterBuilder(eventName) + .build().add(delta, ctx.getPopulatedAttributesBuilder() + .put("labelName", eventName) + .build()); + } + + public + void meterHistogramMillis(Meter meter, T ctx, String eventName) { + meterHistogram(meter, ctx, eventName, "ms", + Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); + } + + public + void meterHistogramMicros(Meter meter, T ctx, String eventName) { + meterHistogram(meter, ctx, eventName, "us", + Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); + } + + public void meterHistogramMillis(Meter meter, IInstrumentationAttributes ctx, String eventName, Duration between) { + meterHistogram(meter, ctx, eventName, "ms", between.toMillis()); + } + + public void meterHistogramMicros(Meter meter, IInstrumentationAttributes ctx, String eventName, Duration between) { + meterHistogram(meter, ctx, eventName, "us", between.toNanos()*1000); + } + + public void meterHistogram(Meter meter, IInstrumentationAttributes ctx, String eventName, String units, long value) { + if (ctx == null) { + return; + } + meter.histogramBuilder(eventName) + .ofLongs() + .setUnit(units) + .build().record(value, ctx.getPopulatedAttributesBuilder() + .put("labelName", eventName) + .build()); + } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java index 00539c58f..d73cf9cf3 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java @@ -1,42 +1,21 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.api.trace.SpanBuilder; -import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.context.Context; -import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; -import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.logs.SdkLoggerProvider; -import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; -import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import lombok.AllArgsConstructor; import java.time.Duration; import java.time.Instant; -import java.util.Optional; -import java.util.concurrent.TimeUnit; -public class SimpleMeteringClosure { +@AllArgsConstructor +public class SimpleMeteringClosure { + public final T ctx; public final Meter meter; - public SimpleMeteringClosure(String scopeName) { - meter = 
GlobalOpenTelemetry.getMeter(scopeName); + public void meterIncrementEvent(String eventName) { + meterIncrementEvent(eventName, 1); } - public void meterIncrementEvent(IInstrumentationAttributes ctx, String eventName) { - meterIncrementEvent(ctx, eventName, 1); - } - - public void meterIncrementEvent(IInstrumentationAttributes ctx, String eventName, long increment) { + public void meterIncrementEvent(String eventName, long increment) { if (ctx == null) { return; } @@ -46,33 +25,22 @@ public void meterIncrementEvent(IInstrumentationAttributes ctx, String eventName .build()); } - public void meterDeltaEvent(IInstrumentationAttributes ctx, String eventName, long delta) { - if (ctx == null) { - return; - } + public void meterDeltaEvent(String eventName, long delta) { meter.upDownCounterBuilder(eventName) .build().add(delta, ctx.getPopulatedAttributesBuilder() .put("labelName", eventName) .build()); } - public void meterHistogramMillis(T ctx, String eventName) { - meterHistogram(ctx, eventName, "ms", Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); - } - - public void meterHistogramMicros(T ctx, String eventName) { - meterHistogram(ctx, eventName, "us", Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); + public void meterHistogramMicros(String eventName, Duration between) { + meterHistogram(eventName, "us", between.toNanos()*1000); } - public void meterHistogramMillis(IInstrumentationAttributes ctx, String eventName, Duration between) { - meterHistogram(ctx, eventName, "ms", between.toMillis()); + public void meterHistogramMillis(String eventName, Duration between) { + meterHistogram(eventName, "ms", between.toMillis()); } - public void meterHistogramMicros(IInstrumentationAttributes ctx, String eventName, Duration between) { - meterHistogram(ctx, eventName, "us", between.toNanos()*1000); - } - - public void meterHistogram(IInstrumentationAttributes ctx, String eventName, String units, long value) { + public void meterHistogram(String eventName, String units, long value) { if (ctx == null) { return; } @@ -83,4 +51,15 @@ public void meterHistogram(IInstrumentationAttributes ctx, String eventName, Str .put("labelName", eventName) .build()); } + + public void meterHistogramMillis(String eventName) { + meterHistogram(eventName, "ms", + Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); + } + + public void meterHistogramMicros(String eventName) { + meterHistogram(eventName, "us", + Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); + } + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 845b13f40..729f8712e 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -8,6 +8,7 @@ public interface IConnectionContext extends IScopedInstrumentationAttributes { static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); static final AttributeKey NODE_ID_ATTR = AttributeKey.stringKey("nodeId"); + String CHANNEL_SCOPE = "Channel"; String getConnectionId(); String getNodeId(); @@ -20,4 +21,5 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder.put(CONNECTION_ID_ATTR, getConnectionId()) 
.put(NODE_ID_ATTR, getNodeId()); } + default String getScopeName() { return CHANNEL_SCOPE; } } diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index f68b9fff6..8e61c3b6c 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -6,6 +6,11 @@ public class TestContext implements IScopedInstrumentationAttributes { public static final TestContext singleton = new TestContext(); + @Override + public String getScopeName() { + return "TestContext"; + } + @Override public IInstrumentationAttributes getEnclosingScope() { return null; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index 1201b7f5b..b33c2966f 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -32,13 +32,12 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob boolean shouldCapture, HttpRequest httpRequest) throws Exception { if (shouldCapture && shouldBlockPredicate.test(httpRequest)) { - METERING_CLOSURE.meterIncrementEvent(messageContext, "blockingRequestUntilFlush"); + messageContext.meterIncrementEvent("blockingRequestUntilFlush"); rotateNextMessageContext(HttpMessageContext.HttpTransactionState.INTERNALLY_BLOCKED); trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); - METERING_CLOSURE.meterIncrementEvent(messageContext, - t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); - METERING_CLOSURE.meterHistogramMicros(messageContext, + messageContext.meterIncrementEvent(t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); + messageContext.meterHistogramMicros( t==null ? 
"blockedFlushFailure_micro" : "stream_flush_failure_micro"); messageContext.endSpan(); @@ -57,7 +56,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob } }); } else { - METERING_CLOSURE.meterIncrementEvent(messageContext, "nonBlockingRequest"); + messageContext.meterIncrementEvent("nonBlockingRequest"); // TODO - log capturing vs non-capturing too super.channelFinishedReadingAnHttpMessage(ctx, msg, shouldCapture, httpRequest); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 085619e87..70184db44 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -35,8 +35,6 @@ @Slf4j public class LoggingHttpHandler extends ChannelDuplexHandler { - public static final String TELEMETRY_SCOPE_NAME = "CapturingHttpHandler"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); private static final MetricsLogger metricsLogger = new MetricsLogger("LoggingHttpRequestHandler"); static class CaptureIgnoreState { @@ -148,7 +146,7 @@ public LoggingHttpHandler(@NonNull IInstrumentConstructor contextConstructor, St var parentContext = new ConnectionContext(contextConstructor, channelKey, nodeId); this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.HttpTransactionState.REQUEST); - METERING_CLOSURE.meterIncrementEvent(messageContext, "requestStarted"); + messageContext.meterIncrementEvent("requestStarted"); this.trafficOffloader = trafficOffloaderFactory.createOffloader(parentContext, channelKey); var captureState = new CaptureState(); @@ -172,7 +170,7 @@ private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - METERING_CLOSURE.meterIncrementEvent(messageContext, "unregistered"); + messageContext.meterIncrementEvent("unregistered"); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -189,7 +187,7 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { - METERING_CLOSURE.meterIncrementEvent(messageContext, "handlerRemoved"); + messageContext.meterIncrementEvent("handlerRemoved"); messageContext.close(); messageContext.getLogicalEnclosingScope().close(); @@ -210,7 +208,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob HttpRequest httpRequest) throws Exception { rotateNextMessageContext(HttpMessageContext.HttpTransactionState.WAITING); super.channelRead(ctx, msg); - METERING_CLOSURE.meterIncrementEvent(messageContext, "requestReceived"); + messageContext.meterIncrementEvent("requestReceived"); metricsLogger.atSuccess(MetricsEvent.RECEIVED_FULL_HTTP_REQUEST) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()) @@ -229,8 +227,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception var bb = ((ByteBuf) msg); 
httpDecoderChannel.writeInbound(bb.retainedDuplicate()); // the ByteBuf is consumed/release by this method - METERING_CLOSURE.meterIncrementEvent(messageContext, - getHandlerThatHoldsParsedHttpRequest().haveParsedFullRequest ? "requestFullyParsed" : "requestPartiallyParsed"); + messageContext.meterIncrementEvent(getHandlerThatHoldsParsedHttpRequest().haveParsedFullRequest + ? "requestFullyParsed" : "requestPartiallyParsed"); var captureState = requestParsingHandler.captureState; var shouldCapture = captureState.shouldCapture(); @@ -244,8 +242,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); - METERING_CLOSURE.meterIncrementEvent(messageContext, "read"); - METERING_CLOSURE.meterIncrementEvent(messageContext, "readBytes", bb.readableBytes()); + messageContext.meterIncrementEvent("read"); + messageContext.meterIncrementEvent("readBytes", bb.readableBytes()); if (requestParsingHandler.haveParsedFullRequest) { @@ -281,8 +279,8 @@ public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) } metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); - METERING_CLOSURE.meterIncrementEvent(messageContext, "write"); - METERING_CLOSURE.meterIncrementEvent(messageContext, "writeBytes", bb.readableBytes()); + messageContext.meterIncrementEvent("write"); + messageContext.meterIncrementEvent("writeBytes", bb.readableBytes()); super.write(ctx, msg, promise); } @@ -290,7 +288,7 @@ public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - METERING_CLOSURE.meterIncrementEvent(messageContext, "exception"); + messageContext.meterIncrementEvent("exception"); httpDecoderChannel.close(); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index 77a64f651..5b47bcd66 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -56,6 +56,8 @@ public HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestI this.sourceRequestIndex = sourceRequestIndex; this.startTime = Instant.now(); this.state = state; - setCurrentSpan("CapturingHttpHandler",getSpanLabelForState(state)); + setCurrentSpan(getSpanLabelForState(state)); } + + @Override public String getScopeName() { return "CapturingHttpHandler"; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index 325b4efed..f0e4cd9c3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -10,8 +10,6 @@ import 
java.util.concurrent.atomic.AtomicLong; public class Accumulation { - public static final String TELEMETRY_SCOPE_NAME = "Accumulator"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); enum State { // Ignore all initial READs, the first EOM & the following WRITEs (if they or EOMs exist) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index c46924521..998a09d78 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -4,27 +4,25 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import java.time.Instant; import java.util.List; public interface AccumulationCallbacks { void onRequestReceived(@NonNull UniqueReplayerRequestKey key, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request); void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrpp); void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); - void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IChannelKeyContext ctx); + void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IReplayContexts.IChannelKeyContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index be78a9d83..e47989c9a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -8,7 +8,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import 
org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -102,7 +102,7 @@ public void onExpireAccumulation(String partitionId, Accumulation accumulation) @AllArgsConstructor private static class SpanWrappingAccumulationCallbacks { private final AccumulationCallbacks underlying; - public void onRequestReceived(IContexts.IRequestAccumulationContext requestCtx, + public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { requestCtx.endSpan(); underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), @@ -125,7 +125,7 @@ public void onConnectionClose(@NonNull Accumulation accum, } public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IContexts.ITrafficStreamsLifecycleContext tsCtx, + IReplayContexts.ITrafficStreamsLifecycleContext tsCtx, @NonNull List trafficStreamKeysBeingHeld) { underlying.onTrafficStreamsExpired(status, tsCtx.getLogicalEnclosingScope(), trafficStreamKeysBeingHeld); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index dd953f08d..5656dbdf3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -14,14 +14,12 @@ import io.opentelemetry.context.ContextKey; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import java.net.URI; import java.util.concurrent.CompletableFuture; @@ -32,8 +30,6 @@ public class ClientConnectionPool { private static final ContextKey RECORD_ID_KEY = ContextKey.named("recordId"); public static final String TELEMETRY_SCOPE_NAME = "ClientConnectionPool"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - public static final String TARGET_CONNECTION_POOL_NAME = "targetConnectionPool"; private final URI serverUri; private final SslContext sslContext; @@ -68,7 +64,7 @@ public ConnectionReplaySession load(final String s) { } private DiagnosticTrackableCompletableFuture - getResilientClientChannelProducer(EventLoop eventLoop, IChannelKeyContext connectionContext) { + getResilientClientChannelProducer(EventLoop eventLoop, IReplayContexts.IChannelKeyContext connectionContext) { return new AdaptiveRateLimiter() .get(() -> { var clientConnectionChannelCreatedFuture = @@ -142,7 +138,7 @@ public void closeConnection(String connId) { } public Future - submitEventualSessionGet(IChannelKeyContext ctx, boolean ignoreIfNotPresent) { + 
submitEventualSessionGet(IReplayContexts.IChannelKeyContext ctx, boolean ignoreIfNotPresent) { ConnectionReplaySession channelFutureAndSchedule = getCachedSession(ctx, ignoreIfNotPresent); if (channelFutureAndSchedule == null) { var rval = new DefaultPromise(eventLoopGroup.next()); @@ -159,7 +155,7 @@ public void closeConnection(String connId) { } @SneakyThrows - public ConnectionReplaySession getCachedSession(IChannelKeyContext channelKey, boolean dontCreate) { + public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyContext channelKey, boolean dontCreate) { var crs = dontCreate ? connectionId2ChannelCache.getIfPresent(channelKey.getConnectionId()) : connectionId2ChannelCache.get(channelKey.getConnectionId()); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java index ffec2ad7b..0658ce350 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java @@ -2,10 +2,9 @@ import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; public interface PacketConsumerFactory { IPacketFinalizingConsumer create(UniqueReplayerRequestKey requestKey, - IContexts.IReplayerHttpTransactionContext context); + IReplayContexts.IReplayerHttpTransactionContext context); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index 6dba100bb..140f7b5bd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -7,8 +7,7 @@ import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -28,7 +27,7 @@ public PacketToTransformingHttpHandlerFactory(IJsonTransformer jsonTransformer, @Override public IPacketFinalizingConsumer> - create(UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext httpTransactionContext) { + create(UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { log.trace("creating HttpJsonTransformingConsumer"); return new HttpJsonTransformingConsumer<>(jsonTransformer, authTransformerFactory, new TransformedPacketReceiver(), httpTransactionContext); diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index d07d31150..c3712409b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -9,9 +9,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; @@ -125,7 +123,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - scheduleTransformationWork(IContexts.IReplayerHttpTransactionContext requestCtx, Instant originalStart, + scheduleTransformationWork(IReplayContexts.IReplayerHttpTransactionContext requestCtx, Instant originalStart, Supplier> task) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "processing"; @@ -137,7 +135,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx, + scheduleRequest(UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext ctx, Instant originalStart, Instant originalEnd, int numPackets, Stream packets) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); @@ -157,7 +155,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant public DiagnosticTrackableCompletableFuture closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, - IChannelKeyContext ctx, Instant timestamp) { + IReplayContexts.IChannelKeyContext ctx, Instant timestamp) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 9340a4987..3298ca2bd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -6,8 +6,8 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; +import 
org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; @@ -40,41 +40,41 @@ public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStr this.firstTrafficStreamKeyForRequest = startingAtTrafficStreamKey; var requestKey = new UniqueReplayerRequestKey(startingAtTrafficStreamKey, startingSourceRequestIndex, indexOfCurrentRequest); - var httpTransactionContext = new Contexts.HttpTransactionContext( + var httpTransactionContext = new ReplayContexts.HttpTransactionContext( startingAtTrafficStreamKey.getTrafficStreamsContext(), requestKey); - requestOrResponseAccumulationContext = new Contexts.RequestAccumulationContext(httpTransactionContext); + requestOrResponseAccumulationContext = new ReplayContexts.RequestAccumulationContext(httpTransactionContext); } @NonNull ISourceTrafficChannelKey getBeginningTrafficStreamKey() { return firstTrafficStreamKeyForRequest; } - public IContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { + public IReplayContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { var looseCtx = requestOrResponseAccumulationContext; // the req/response ctx types in the assert below will always implement this with the // IReplayerHttpTransactionContext parameter, but this seems clearer // than trying to engineer a compile time static check assert looseCtx instanceof IWithTypedEnclosingScope; - assert looseCtx instanceof IContexts.IRequestAccumulationContext - || looseCtx instanceof IContexts.IResponseAccumulationContext; - return ((IWithTypedEnclosingScope) looseCtx) + assert looseCtx instanceof IReplayContexts.IRequestAccumulationContext + || looseCtx instanceof IReplayContexts.IResponseAccumulationContext; + return ((IWithTypedEnclosingScope) looseCtx) .getLogicalEnclosingScope(); } - public @NonNull IContexts.IRequestAccumulationContext getRequestContext() { - return (IContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; + public @NonNull IReplayContexts.IRequestAccumulationContext getRequestContext() { + return (IReplayContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; } - public @NonNull IContexts.IResponseAccumulationContext getResponseContext() { - return (IContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; + public @NonNull IReplayContexts.IResponseAccumulationContext getResponseContext() { + return (IReplayContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; } public void rotateRequestGatheringToResponse() { var looseCtx = requestOrResponseAccumulationContext; - assert looseCtx instanceof IContexts.IRequestAccumulationContext; - requestOrResponseAccumulationContext = new Contexts.ResponseAccumulationContext( + assert looseCtx instanceof IReplayContexts.IRequestAccumulationContext; + requestOrResponseAccumulationContext = new ReplayContexts.ResponseAccumulationContext( getRequestContext().getLogicalEnclosingScope()); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index d5a235906..373283dbe 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -10,12 +10,10 @@ import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; import org.opensearch.migrations.replay.datatypes.ChannelTask; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import java.time.Duration; import java.time.Instant; @@ -39,14 +37,14 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { } public DiagnosticTrackableCompletableFuture - scheduleWork(IContexts.IReplayerHttpTransactionContext ctx, Instant timestamp, + scheduleWork(IReplayContexts.IReplayerHttpTransactionContext ctx, Instant timestamp, Supplier> task) { var connectionSession = clientConnectionPool.getCachedSession(ctx.getChannelKeyContext(), false); var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); log.atDebug().setMessage(()->"Scheduling work for "+ctx.getConnectionId()+" at time "+timestamp).log(); - var scheduledContext = new Contexts.ScheduledContext(ctx); + var scheduledContext = new ReplayContexts.ScheduledContext(ctx); // this method doesn't use the scheduling that scheduleRequest and scheduleClose use because // doing work associated with a connection is considered to be preprocessing work independent // of the underlying network connection itself, so it's fair to be able to do this without @@ -68,7 +66,7 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx, + scheduleRequest(UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext ctx, Instant start, Duration interval, Stream packets) { var finalTunneledResponse = new StringTrackableCompletableFuture(new CompletableFuture<>(), @@ -80,7 +78,7 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { channelFutureAndRequestSchedule, finalTunneledResponse, start, interval, packets)); } - public StringTrackableCompletableFuture scheduleClose(IChannelKeyContext ctx, + public StringTrackableCompletableFuture scheduleClose(IReplayContexts.IChannelKeyContext ctx, int channelInteractionNum, Instant timestamp) { var channelKey = ctx.getChannelKey(); @@ -103,7 +101,7 @@ public StringTrackableCompletableFuture scheduleClose(IChannelKeyContext c } private DiagnosticTrackableCompletableFuture - asynchronouslyInvokeRunnableToSetupFuture(IChannelKeyContext ctx, int channelInteractionNumber, + asynchronouslyInvokeRunnableToSetupFuture(IReplayContexts.IChannelKeyContext ctx, int channelInteractionNumber, boolean ignoreIfChannelNotPresent, DiagnosticTrackableCompletableFuture finalTunneledResponse, Consumer successFn) { @@ -157,7 +155,7 @@ public StringTrackableCompletableFuture scheduleClose(IChannelKeyContext c 
return finalTunneledResponse; } - private void scheduleOnConnectionReplaySession(IChannelKeyContext ctx, int channelInteractionIdx, + private void scheduleOnConnectionReplaySession(IReplayContexts.IChannelKeyContext ctx, int channelInteractionIdx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture futureToBeCompletedByTask, Instant atTime, ChannelTask task) { @@ -208,15 +206,15 @@ private void scheduleOnConnectionReplaySession(IChannelKeyContext ctx, int c }), ()->""); } - private void scheduleSendOnConnectionReplaySession(IContexts.IReplayerHttpTransactionContext ctx, + private void scheduleSendOnConnectionReplaySession(IReplayContexts.IReplayerHttpTransactionContext ctx, ConnectionReplaySession channelFutureAndRequestSchedule, StringTrackableCompletableFuture responseFuture, Instant start, Duration interval, Stream packets) { var eventLoop = channelFutureAndRequestSchedule.eventLoop; var packetReceiverRef = new AtomicReference(); Runnable packetSender = () -> { - try (var targetContext = new Contexts.TargetRequestContext(ctx); - var requestContext = new Contexts.RequestSendingContext(targetContext)) { + try (var targetContext = new ReplayContexts.TargetRequestContext(ctx); + var requestContext = new ReplayContexts.RequestSendingContext(targetContext)) { sendNextPartAndContinue(() -> memoizePacketConsumer(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), packetReceiverRef), @@ -254,7 +252,7 @@ private long getDelayFromNowMs(Instant to) { } private static NettyPacketToHttpConsumer - memoizePacketConsumer(IContexts.IReplayerHttpTransactionContext httpTransactionContext, ChannelFuture channelFuture, + memoizePacketConsumer(IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext, ChannelFuture channelFuture, AtomicReference packetReceiver) { if (packetReceiver.get() == null) { packetReceiver.set(new NettyPacketToHttpConsumer(channelFuture, httpTransactionContext)); @@ -267,8 +265,8 @@ private void sendNextPartAndContinue(Supplier packetH EventLoop eventLoop, Iterator iterator, Instant start, Duration interval, AtomicInteger counter, StringTrackableCompletableFuture responseFuture, - Contexts.TargetRequestContext targetContext, - Contexts.RequestSendingContext requestContext) { + ReplayContexts.TargetRequestContext targetContext, + ReplayContexts.RequestSendingContext requestContext) { log.atTrace().setMessage(()->"sendNextPartAndContinue: counter=" + counter.get()).log(); var packetReceiver = packetHandlerSupplier.get(); assert iterator.hasNext() : "Should not have called this with no items to send"; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 993a9e604..2c0a2b546 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -8,16 +8,14 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import io.netty.util.concurrent.Future; -import io.opentelemetry.sdk.OpenTelemetrySdk; import lombok.AllArgsConstructor; import lombok.Lombok; import lombok.NonNull; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.Contexts; -import 
org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; @@ -78,8 +76,6 @@ public class TrafficReplayer { private static final MetricsLogger TUPLE_METRICS_LOGGER = new MetricsLogger("SourceTargetCaptureTuple"); - private static final SimpleMeteringClosure METERING_CLOSURE = - new SimpleMeteringClosure("TrafficReplayer"); public static final String SIGV_4_AUTH_HEADER_SERVICE_REGION_ARG = "--sigv4-auth-header-service-region"; public static final String AUTH_HEADER_VALUE_ARG = "--auth-header-value"; @@ -611,7 +607,7 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { @Override public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); @@ -630,7 +626,7 @@ public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, @Override public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrPair) { log.atInfo().setMessage(()->"Done receiving captured stream for " + requestKey + ":" + rrPair.requestData).log(); @@ -656,7 +652,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, // packaging it up and calling the callback. // Escalate it up out handling stack and shutdown. 
if (t == null || t instanceof Exception) { - try (var tupleHandlingContext = new Contexts.TupleHandlingContext(httpContext)) { + try (var tupleHandlingContext = new ReplayContexts.TupleHandlingContext(httpContext)) { packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); } commitTrafficStreams(context, rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); @@ -693,7 +689,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { commitTrafficStreams(ctx, trafficStreamKeysBeingHeld, status); } @@ -719,7 +715,7 @@ private void commitTrafficStreams(IInstrumentationAttributes context, @Override public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, int channelInteractionNum, - IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant timestamp, @NonNull List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); @@ -729,7 +725,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, int } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IChannelKeyContext ctx) { + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IReplayContexts.IChannelKeyContext ctx) { commitTrafficStreams(ctx, List.of(tsk), true); } @@ -883,7 +879,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture transformAndSendRequest(ReplayEngine replayEngine, HttpMessageAndTimestamp request, - UniqueReplayerRequestKey requestKey, IContexts.IReplayerHttpTransactionContext ctx) { + UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext ctx) { return transformAndSendRequest(inputRequestTransformerFactory, replayEngine, ctx, request.getFirstPacketTimestamp(), request.getLastPacketTimestamp(), requestKey, request.packetBytes::stream); @@ -891,7 +887,7 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture transformAndSendRequest(PacketToTransformingHttpHandlerFactory inputRequestTransformerFactory, - ReplayEngine replayEngine, IContexts.IReplayerHttpTransactionContext ctx, + ReplayEngine replayEngine, IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull Instant start, @NonNull Instant end, UniqueReplayerRequestKey requestKey, Supplier> packetsSupplier) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 85d424f60..6220a820e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -22,11 +22,9 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.Contexts; -import 
org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; @@ -41,8 +39,6 @@ @Slf4j public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer { - public static final String TELEMETRY_SCOPE_NAME = "HttpSender"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); /** * Set this to of(LogLevel.ERROR) or whatever level you'd like to get logging between each handler. @@ -60,18 +56,18 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - IContexts.ITargetRequestContext parentContext; + IReplayContexts.ITargetRequestContext parentContext; IScopedInstrumentationAttributes currentRequestContext; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, - Contexts.HttpTransactionContext httpTransactionContext) { + ReplayContexts.HttpTransactionContext httpTransactionContext) { this(createClientConnection(eventLoopGroup, sslContext, serverUri, httpTransactionContext.getLogicalEnclosingScope()), httpTransactionContext); } - public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IContexts.IReplayerHttpTransactionContext ctx) { - this.parentContext = new Contexts.TargetRequestContext(ctx); - this.currentRequestContext = new Contexts.RequestSendingContext(this.parentContext); + public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IReplayContexts.IReplayerHttpTransactionContext ctx) { + this.parentContext = new ReplayContexts.TargetRequestContext(ctx); + this.currentRequestContext = new ReplayContexts.RequestSendingContext(this.parentContext); responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), @@ -95,7 +91,7 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IContexts.IRepl } public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, SslContext sslContext, - URI serverUri, IChannelKeyContext channelKeyContext) { + URI serverUri, IReplayContexts.IChannelKeyContext channelKeyContext) { String host = serverUri.getHost(); int port = serverUri.getPort(); log.atTrace().setMessage(()->"Active - setting up backend connection to " + host + ":" + port).log(); @@ -162,7 +158,7 @@ private void activateChannelForThisConsumer() { addLoggingHandler(pipeline, "B"); pipeline.addLast(new BacksideSnifferHandler(responseBuilder, ()->{ this.currentRequestContext.close(); - this.currentRequestContext = new Contexts.ReceivingHttpResponseContext(this.parentContext); + this.currentRequestContext = new ReplayContexts.ReceivingHttpResponseContext(this.parentContext); })); addLoggingHandler(pipeline, "C"); @@ -219,7 +215,7 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa return activeChannelFuture; } - private 
IContexts.IReplayerHttpTransactionContext httpContext() { + private IReplayContexts.IReplayerHttpTransactionContext httpContext() { return parentContext.getLogicalEnclosingScope(); } @@ -228,7 +224,7 @@ private IContexts.IReplayerHttpTransactionContext httpContext() { final var completableFuture = new DiagnosticTrackableCompletableFuture(new CompletableFuture<>(), ()->"CompletableFuture that will wait for the netty future to fill in the completion value"); final int readableBytes = packetData.readableBytes(); - METERING_CLOSURE.meterIncrementEvent(currentRequestContext, "readBytes", packetData.readableBytes()); + this.currentRequestContext.meterIncrementEvent("readBytes", packetData.readableBytes()); channel.writeAndFlush(packetData) .addListener((ChannelFutureListener) future -> { Throwable cause = null; @@ -277,7 +273,7 @@ private IContexts.IReplayerHttpTransactionContext httpContext() { finalizeRequest() { var ff = activeChannelFuture.getDeferredFutureThroughHandle((v,t)-> { this.currentRequestContext.close(); - this.currentRequestContext = new Contexts.WaitingForHttpResponseContext(parentContext); + this.currentRequestContext = new ReplayContexts.WaitingForHttpResponseContext(parentContext); var future = new CompletableFuture(); var rval = new DiagnosticTrackableCompletableFuture(future, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index cf369112e..877885421 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -7,8 +7,8 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; @@ -47,15 +47,12 @@ */ @Slf4j public class HttpJsonTransformingConsumer implements IPacketFinalizingConsumer> { - public static final String TELEMETRY_SCOPE_NAME = "HttpTransformer"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - public static final int HTTP_MESSAGE_NUM_SEGMENTS = 2; public static final int EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS = 4; private final RequestPipelineOrchestrator pipelineOrchestrator; private final EmbeddedChannel channel; private static final MetricsLogger metricsLogger = new MetricsLogger("HttpJsonTransformingConsumer"); - private Contexts.RequestTransformationContext transformationContext; + private ReplayContexts.RequestTransformationContext transformationContext; /** * Roughly try to keep track of how big each data chunk was that came into the transformer. 
These values @@ -72,8 +69,8 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume public HttpJsonTransformingConsumer(IJsonTransformer transformer, IAuthTransformerFactory authTransformerFactory, IPacketFinalizingConsumer transformedPacketReceiver, - IContexts.IReplayerHttpTransactionContext httpTransactionContext) { - transformationContext = new Contexts.RequestTransformationContext(httpTransactionContext); + IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { + transformationContext = new ReplayContexts.RequestTransformationContext(httpTransactionContext); chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); @@ -141,9 +138,9 @@ public DiagnosticTrackableCompletableFuture { transformationContext.endSpan(); - METERING_CLOSURE.meterIncrementEvent(transformationContext, - t != null ? "transformRequestFailed" : "transformRequestSuccess"); - METERING_CLOSURE.meterHistogramMicros(transformationContext, "transformationDuration"); + transformationContext.meterIncrementEvent(t != null ? "transformRequestFailed" : + "transformRequestSuccess"); + transformationContext.meterHistogramMicros("transformationDuration"); if (t != null) { t = unwindPossibleCompletionException(t); if (t instanceof NoContentException) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 2ee09e821..07934f9b6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -10,8 +10,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datahandlers.PayloadAccessFaultingMap; import org.opensearch.migrations.replay.datahandlers.PayloadNotLoadedException; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IJsonTransformer; @@ -28,13 +27,13 @@ public class NettyDecodedHttpRequestPreliminaryConvertHandler extends Channel final IJsonTransformer transformer; final List> chunkSizes; final String diagnosticLabel; - private IContexts.IReplayerHttpTransactionContext httpTransactionContext; + private IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext; static final MetricsLogger metricsLogger = new MetricsLogger("NettyDecodedHttpRequestPreliminaryConvertHandler"); public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, - IContexts.IReplayerHttpTransactionContext httpTransactionContext) { + IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java index 641f4d916..0d7fd432e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettySendByteBufsToPacketHandlerHandler.java @@ -8,8 +8,7 @@ import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; @@ -30,10 +29,10 @@ public class NettySendByteBufsToPacketHandlerHandler extends ChannelInboundHa DiagnosticTrackableCompletableFuture currentFuture; private AtomicReference>> packetReceiverCompletionFutureRef; - IContexts.IReplayerHttpTransactionContext httpTransactionContext; + IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext; public NettySendByteBufsToPacketHandlerHandler(IPacketFinalizingConsumer packetReceiver, - IContexts.IReplayerHttpTransactionContext httpTransactionContext) { + IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { this.packetReceiver = packetReceiver; this.packetReceiverCompletionFutureRef = new AtomicReference<>(); this.httpTransactionContext = httpTransactionContext; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index e3590f94b..9e0919556 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -10,7 +10,7 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -42,14 +42,14 @@ public class RequestPipelineOrchestrator { public static final String HTTP_REQUEST_DECODER_NAME = "HTTP_REQUEST_DECODER"; private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; - private IContexts.IReplayerHttpTransactionContext httpTransactionContext; + private IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext; @Getter final IAuthTransformerFactory authTransfomerFactory; public RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, 
IAuthTransformerFactory incomingAuthTransformerFactory, - IContexts.IReplayerHttpTransactionContext httpTransactionContext) { + IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { this.chunkSizes = chunkSizes; this.packetReceiver = packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java index 117999eb0..e018f3c61 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java @@ -6,7 +6,7 @@ import lombok.Setter; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.OnlineRadixSorter; @@ -34,7 +34,7 @@ public class ConnectionReplaySession { @Getter @Setter - private IChannelKeyContext channelContext; + private IReplayContexts.IChannelKeyContext channelContext; public ConnectionReplaySession(EventLoop eventLoop) { this.eventLoop = eventLoop; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java index b398ec088..17feb6fe3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java @@ -1,11 +1,10 @@ package org.opensearch.migrations.replay.datatypes; import lombok.NonNull; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; public interface ISourceTrafficChannelKey { String getNodeId(); String getConnectionId(); - @NonNull IContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); + @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java index 8ca33f1cd..ab9d6ced4 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java @@ -1,8 +1,5 @@ package org.opensearch.migrations.replay.datatypes; -import lombok.NonNull; -import org.opensearch.migrations.replay.tracing.IContexts; - public interface ITrafficStreamKey extends ISourceTrafficChannelKey { int getTrafficStreamIndex(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java index e945c9f40..ab5de2d81 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java @@ -1,13 +1,12 @@ package org.opensearch.migrations.replay.datatypes; -import java.util.StringJoiner; import java.util.function.Function; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NonNull; import lombok.Setter; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -16,11 +15,11 @@ public class PojoTrafficStreamKeyAndContext extends PojoTrafficStreamKey { @Getter @Setter @NonNull - IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; + IReplayContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; public static PojoTrafficStreamKeyAndContext build(TrafficStream stream, - Function contextSupplier) { + Function contextSupplier) { var rval = new PojoTrafficStreamKeyAndContext(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); rval.setTrafficStreamsContext(contextSupplier.apply(rval)); @@ -32,7 +31,7 @@ protected PojoTrafficStreamKeyAndContext(TrafficStream stream) { } public static PojoTrafficStreamKeyAndContext build(String nodeId, String connectionId, int index, Function contextSupplier) { + IReplayContexts.ITrafficStreamsLifecycleContext> contextSupplier) { var rval = new PojoTrafficStreamKeyAndContext(nodeId, connectionId, index); rval.setTrafficStreamsContext(contextSupplier.apply(rval)); return rval; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 0f7959ed9..87a61acd3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -14,8 +14,7 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; -import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -67,17 +66,12 @@ */ @Slf4j public class KafkaTrafficCaptureSource implements ISimpleTrafficCaptureSource { - - public static final String TELEMETRY_SCOPE_NAME = "KafkaSource"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - public static final String MAX_POLL_INTERVAL_KEY = "max.poll.interval.ms"; // see 
https://stackoverflow.com/questions/39730126/difference-between-session-timeout-ms-and-max-poll-interval-ms-for-kafka-0-10 public static final String DEFAULT_POLL_INTERVAL_MS = "60000"; private static final MetricsLogger metricsLogger = new MetricsLogger("KafkaProtobufConsumer"); - final TrackingKafkaConsumer trackingKafkaConsumer; private final ExecutorService kafkaExecutor; private final AtomicLong trafficStreamsRead; @@ -107,13 +101,13 @@ public KafkaTrafficCaptureSource(@NonNull IInstrumentationAttributes globalConte private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { var looseParentScope = trafficStreamKey.getTrafficStreamsContext().getEnclosingScope(); - if (!(looseParentScope instanceof Contexts.KafkaRecordContext)) { - throw new IllegalArgumentException("Expected parent context of type " + Contexts.KafkaRecordContext.class + + if (!(looseParentScope instanceof ReplayContexts.KafkaRecordContext)) { + throw new IllegalArgumentException("Expected parent context of type " + ReplayContexts.KafkaRecordContext.class + " instead of " + looseParentScope + " (of type=" + looseParentScope.getClass() + ")"); } - var kafkaCtx = (Contexts.KafkaRecordContext) looseParentScope; + var kafkaCtx = (ReplayContexts.KafkaRecordContext) looseParentScope; kafkaCtx.endSpan(); - channelContextManager.releaseContextFor((ChannelKeyContext) kafkaCtx.getImmediateEnclosingScope()); + channelContextManager.releaseContextFor((ReplayContexts.ChannelKeyContext) kafkaCtx.getImmediateEnclosingScope()); } public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull IInstrumentationAttributes globalContext, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index bbe4353e6..c73b6081a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -11,10 +11,10 @@ import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.slf4j.event.Level; import java.time.Clock; @@ -42,10 +42,6 @@ */ @Slf4j public class TrackingKafkaConsumer implements ConsumerRebalanceListener { - public static final String TELEMETRY_SCOPE_NAME = "TrackingKafkaConsumer"; - private static final SimpleMeteringClosure METERING_CLOSURE = - new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); - @AllArgsConstructor private static class OrderedKeyHolder implements Comparable { @Getter final long offset; @@ -76,28 +72,46 @@ public int hashCode() { public static class TouchScopeContext extends DirectNestedSpanContext { public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "touch"); + setCurrentSpan("touch"); + } + + @Override + public String getScopeName() { + return IReplayContexts.KAFKA_CONSUMER_SCOPE; } } public static class 
PollScopeContext extends DirectNestedSpanContext { public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "kafkaPoll"); + setCurrentSpan("kafkaPoll"); + } + @Override + public String getScopeName() { + return IReplayContexts.KAFKA_CONSUMER_SCOPE; } + } public static class CommitScopeContext extends DirectNestedSpanContext { public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "commit"); + setCurrentSpan("commit"); + } + @Override + public String getScopeName() { + return IReplayContexts.KAFKA_CONSUMER_SCOPE; } } public static class KafkaCommitScopeContext extends DirectNestedSpanContext { public KafkaCommitScopeContext(@NonNull CommitScopeContext enclosingScope) { super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "kafkaCommit"); + setCurrentSpan("kafkaCommit"); + } + @Override + public String getScopeName() { + return IReplayContexts.KAFKA_CONSUMER_SCOPE; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java index 9671315fe..f150321b5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java @@ -4,9 +4,8 @@ import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; -import org.opensearch.migrations.replay.tracing.Contexts; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; +import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.util.StringJoiner; @@ -19,12 +18,12 @@ class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext i private final int partition; private final long offset; - TrafficStreamKeyWithKafkaRecordId(Function contextFactory, + TrafficStreamKeyWithKafkaRecordId(Function contextFactory, TrafficStream trafficStream, String recordId, KafkaCommitOffsetData ok) { this(contextFactory, trafficStream, recordId, ok.getGeneration(), ok.getPartition(), ok.getOffset()); } - TrafficStreamKeyWithKafkaRecordId(Function contextFactory, + TrafficStreamKeyWithKafkaRecordId(Function contextFactory, TrafficStream trafficStream, String recordId, int generation, int partition, long offset) { super(trafficStream); @@ -32,8 +31,8 @@ class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext i this.partition = partition; this.offset = offset; var channelKeyContext = contextFactory.apply(this); - var kafkaContext = new Contexts.KafkaRecordContext(channelKeyContext, recordId); - this.setTrafficStreamsContext(new Contexts.TrafficStreamsLifecycleContext(kafkaContext, this)); + var kafkaContext = new ReplayContexts.KafkaRecordContext(channelKeyContext, recordId); + this.setTrafficStreamsContext(new ReplayContexts.TrafficStreamsLifecycleContext(kafkaContext, this)); } @Override diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index b49754278..6ab207904 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -2,17 +2,12 @@ import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.ISpanGenerator; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import java.util.HashMap; import java.util.function.Function; -public class ChannelContextManager implements Function { - public static final String TELEMETRY_SCOPE_NAME = "Channel"; - public static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure(TELEMETRY_SCOPE_NAME); +public class ChannelContextManager implements Function { private final IInstrumentationAttributes globalContext; public ChannelContextManager(IInstrumentationAttributes globalContext) { @@ -20,14 +15,14 @@ public ChannelContextManager(IInstrumentationAttributes globalContext) { } private static class RefCountedContext { - @Getter final ChannelKeyContext context; + @Getter final ReplayContexts.ChannelKeyContext context; private int refCount; - private RefCountedContext(ChannelKeyContext context) { + private RefCountedContext(ReplayContexts.ChannelKeyContext context) { this.context = context; } - ChannelKeyContext retain() { + ReplayContexts.ChannelKeyContext retain() { refCount++; return context; } @@ -46,16 +41,16 @@ boolean release() { HashMap connectionToChannelContextMap = new HashMap<>(); - public ChannelKeyContext apply(ITrafficStreamKey tsk) { + public ReplayContexts.ChannelKeyContext apply(ITrafficStreamKey tsk) { return retainOrCreateContext(tsk); } - public ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { + public ReplayContexts.ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), - k-> new RefCountedContext(new ChannelKeyContext(globalContext, tsk))).retain(); + k-> new RefCountedContext(new ReplayContexts.ChannelKeyContext(globalContext, tsk))).retain(); } - public ChannelKeyContext releaseContextFor(ChannelKeyContext ctx) { + public ReplayContexts.ChannelKeyContext releaseContextFor(ReplayContexts.ChannelKeyContext ctx) { var connId = ctx.getConnectionId(); var refCountedCtx = connectionToChannelContextMap.get(connId); assert ctx == refCountedCtx.context; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java deleted file mode 100644 index 1c7e72c74..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelKeyContext.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import lombok.Getter; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.tracing.AbstractNestedSpanContext; -import 
org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IWithStartTime; - -public class ChannelKeyContext extends AbstractNestedSpanContext - implements IChannelKeyContext, IWithStartTime { - @Getter - final ISourceTrafficChannelKey channelKey; - - public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { - super(enclosingScope); - this.channelKey = channelKey; - setCurrentSpan("Connection", "channel"); - } - - @Override - public String toString() { - return channelKey.toString(); - } - -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java deleted file mode 100644 index 6fbdb38c8..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/Contexts.java +++ /dev/null @@ -1,168 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IndirectNestedSpanContext; - -public class Contexts { - - private Contexts() {} - - public static class KafkaRecordContext extends DirectNestedSpanContext - implements IContexts.IKafkaRecordContext { - final String recordId; - - public KafkaRecordContext(IChannelKeyContext enclosingScope, String recordId) { - super(enclosingScope); - this.recordId = recordId; - setCurrentSpan("Kafka", "recordLifetime"); - } - - @Override - public String getRecordId() { - return recordId; - } - } - - public static class TrafficStreamsLifecycleContext - extends IndirectNestedSpanContext - implements IContexts.ITrafficStreamsLifecycleContext { - private final ITrafficStreamKey trafficStreamKey; - - public TrafficStreamsLifecycleContext(IContexts.IKafkaRecordContext enclosingScope, - ITrafficStreamKey trafficStreamKey) { - super(enclosingScope); - this.trafficStreamKey = trafficStreamKey; - setCurrentSpan("KafkaRecords", "trafficStreamLifetime"); - } - - @Override - public IChannelKeyContext getChannelKeyContext() { - return getLogicalEnclosingScope(); - } - - @Override - public ITrafficStreamKey getTrafficStreamKey() { - return trafficStreamKey; - } - - @Override - public IChannelKeyContext getLogicalEnclosingScope() { - return getImmediateEnclosingScope().getLogicalEnclosingScope(); - } - } - - public static class HttpTransactionContext - extends IndirectNestedSpanContext - implements IContexts.IReplayerHttpTransactionContext { - final UniqueReplayerRequestKey replayerRequestKey; - - public HttpTransactionContext(IContexts.ITrafficStreamsLifecycleContext enclosingScope, - UniqueReplayerRequestKey replayerRequestKey) { - super(enclosingScope); - this.replayerRequestKey = replayerRequestKey; - setCurrentSpan("Accumulator", "httpTransaction"); - } - - public IChannelKeyContext getChannelKeyContext() { - return getLogicalEnclosingScope(); - } - - @Override - public UniqueReplayerRequestKey getReplayerRequestKey() { - return replayerRequestKey; - } - - @Override - public String toString() { - return replayerRequestKey.toString(); - } - - @Override - public IChannelKeyContext getLogicalEnclosingScope() { - return getImmediateEnclosingScope().getLogicalEnclosingScope(); - } - } - - public static class RequestAccumulationContext - 
extends DirectNestedSpanContext - implements IContexts.IRequestAccumulationContext { - public RequestAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("Accumulator", "accumulatingRequest"); - } - } - - public static class ResponseAccumulationContext - extends DirectNestedSpanContext - implements IContexts.IResponseAccumulationContext { - public ResponseAccumulationContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("Accumulator", "accumulatingResponse"); - } - } - - public static class RequestTransformationContext - extends DirectNestedSpanContext - implements IContexts.IRequestTransformationContext { - public RequestTransformationContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("HttpTransformer", "transformation"); - } - } - - public static class ScheduledContext - extends DirectNestedSpanContext - implements IContexts.IScheduledContext { - public ScheduledContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("RequestSender", "scheduled"); - } - } - - public static class TargetRequestContext - extends DirectNestedSpanContext - implements IContexts.ITargetRequestContext { - public TargetRequestContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("RequestSender", "targetTransaction"); - } - } - - public static class RequestSendingContext - extends DirectNestedSpanContext - implements IContexts.IRequestSendingContext { - public RequestSendingContext(IContexts.ITargetRequestContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("RequestSender","requestSending"); - } - } - - public static class WaitingForHttpResponseContext - extends DirectNestedSpanContext - implements IContexts.IWaitingForHttpResponseContext { - public WaitingForHttpResponseContext(IContexts.ITargetRequestContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("RequestSender", "waitingForResponse"); - } - } - - public static class ReceivingHttpResponseContext - extends DirectNestedSpanContext - implements IContexts.IReceivingHttpResponseContext { - public ReceivingHttpResponseContext(IContexts.ITargetRequestContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("HttpSender", "receivingRequest"); - } - } - - public static class TupleHandlingContext - extends DirectNestedSpanContext - implements IContexts.ITupleHandlingContext { - public TupleHandlingContext(IContexts.IReplayerHttpTransactionContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("TrafficReplayer", "tupleHandling"); - } - } -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java deleted file mode 100644 index 0aa5ffa20..000000000 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IChannelKeyContext.java +++ /dev/null @@ -1,18 +0,0 @@ -package org.opensearch.migrations.replay.tracing; - -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; - -public interface IChannelKeyContext extends IConnectionContext { - // do not add this as a property - // because its components are already being added in 
the IConnectionContext implementation - ISourceTrafficChannelKey getChannelKey(); - - default String getConnectionId() { - return getChannelKey().getConnectionId(); - } - - default String getNodeId() { - return getChannelKey().getNodeId(); - } -} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java new file mode 100644 index 000000000..2759406a5 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -0,0 +1,16 @@ +package org.opensearch.migrations.replay.tracing; + +import org.opensearch.migrations.tracing.IInstrumentationAttributes; + +public interface IKafkaConsumerContexts { + interface IKafkaConsumerScope extends IInstrumentationAttributes { + @Override + default String getScopeName() { return IReplayContexts.KAFKA_CONSUMER_SCOPE; } + } + interface ITouchScopeContext extends IKafkaCommitScopeContext {} + interface IPollScopeContext extends IKafkaConsumerScope {} + + interface ICommitScopeContext extends IKafkaConsumerScope {} + + interface IKafkaCommitScopeContext extends IKafkaConsumerScope {} +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java similarity index 58% rename from TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java rename to TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 24df98e07..30e5b7724 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -7,15 +7,26 @@ import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; -public class IContexts { +public class IReplayContexts { + + public static final String KAFKA_RECORD_SCOPE = "KafkaRecord"; + public static final String TRAFFIC_STREAM_LIFETIME_SCOPE = "TrafficStreamLifetime"; + public static final String ACCUMULATOR_SCOPE = "Accumulator"; + public static final String KAFKA_CONSUMER_SCOPE = "TrackingKafkaConsumer"; + public static final String HTTP_TRANSFORMER_SCOPE = "HttpTransformer"; + public static final String REQUEST_SENDER_SCOPE = "RequestSender"; + public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; + public interface IKafkaRecordContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); String getRecordId(); + default String getScopeName() { return KAFKA_RECORD_SCOPE; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { return IScopedInstrumentationAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); } @@ -25,6 +36,7 @@ public interface ITrafficStreamsLifecycleContext extends IChannelKeyContext, IWithTypedEnclosingScope { 
ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); + default String getScopeName() { return TRAFFIC_STREAM_LIFETIME_SCOPE; } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -37,6 +49,7 @@ public interface IReplayerHttpTransactionContext UniqueReplayerRequestKey getReplayerRequestKey(); IChannelKeyContext getChannelKeyContext(); + @Override default String getScopeName() { return ACCUMULATOR_SCOPE; } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -57,29 +70,61 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } public interface IRequestAccumulationContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return ACCUMULATOR_SCOPE; } + } public interface IResponseAccumulationContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return ACCUMULATOR_SCOPE; } + } public interface IRequestTransformationContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return HTTP_TRANSFORMER_SCOPE; } + } public interface IScheduledContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return REQUEST_SENDER_SCOPE; } + } public interface ITargetRequestContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return REQUEST_SENDER_SCOPE; } + } public interface IRequestSendingContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return REQUEST_SENDER_SCOPE; } + } public interface IWaitingForHttpResponseContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return REQUEST_SENDER_SCOPE; } + } public interface IReceivingHttpResponseContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return REQUEST_SENDER_SCOPE; } + } public interface ITupleHandlingContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { } + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + default String getScopeName() { return TRAFFIC_REPLAYER_SCOPE; } + } + + public static interface IChannelKeyContext extends IConnectionContext { + // do not add this as a property + // because its components are already being added in the IConnectionContext implementation + ISourceTrafficChannelKey getChannelKey(); + + default String getConnectionId() { + return getChannelKey().getConnectionId(); + } + + default String getNodeId() { + return getChannelKey().getNodeId(); + } + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java new file mode 100644 index 000000000..eaef35eea --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -0,0 +1,16 @@ +package org.opensearch.migrations.replay.tracing; + +import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; + +public interface ITrafficSourceContexts { + String TELEMETRY_SCOPE_NAME = "BlockingTrafficSource"; + + interface ITrafficSourceContext extends IScopedInstrumentationAttributes { + @Override + default String getScopeName() { return TELEMETRY_SCOPE_NAME; } + } + interface IReadChunkContext extends ITrafficSourceContext {} + interface IBackPressureBlockContext extends ITrafficSourceContext {} + interface IWaitForNextSignal extends ITrafficSourceContext {} +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java new file mode 100644 index 000000000..289412ae9 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -0,0 +1,42 @@ +package org.opensearch.migrations.replay.tracing; + +import lombok.NonNull; +import org.opensearch.migrations.replay.kafka.TrackingKafkaConsumer; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; + +public class KafkaConsumerContexts { + public static class TouchScopeContext extends DirectNestedSpanContext + implements IKafkaConsumerContexts.ITouchScopeContext + { + public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + super(enclosingScope); + setCurrentSpan("touch"); + } + } + + public static class PollScopeContext extends DirectNestedSpanContext + implements IKafkaConsumerContexts.IPollScopeContext { + public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + super(enclosingScope); + setCurrentSpan("kafkaPoll"); + } + } + + public static class CommitScopeContext extends DirectNestedSpanContext + implements IKafkaConsumerContexts.ICommitScopeContext { + public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + super(enclosingScope); + setCurrentSpan("commit"); + } + } + + public static class KafkaCommitScopeContext + extends DirectNestedSpanContext + implements IKafkaConsumerContexts.IKafkaCommitScopeContext { + public KafkaCommitScopeContext(@NonNull TrackingKafkaConsumer.CommitScopeContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("kafkaCommit"); + } + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java new file mode 100644 index 000000000..6b5814692 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -0,0 +1,192 @@ +package org.opensearch.migrations.replay.tracing; + +import lombok.Getter; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IWithStartTime; +import org.opensearch.migrations.tracing.IndirectNestedSpanContext; + +public class ReplayContexts { + + private ReplayContexts() {} + + public static class ChannelKeyContext extends AbstractNestedSpanContext + implements IReplayContexts.IChannelKeyContext, IWithStartTime { + @Getter + final ISourceTrafficChannelKey channelKey; + + public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { + super(enclosingScope); + this.channelKey = channelKey; + setCurrentSpan("channel"); + } + + @Override + public String toString() { + return channelKey.toString(); + } + + @Override public String getScopeName() { return "Connection"; } + } + + public static class KafkaRecordContext extends DirectNestedSpanContext + implements IReplayContexts.IKafkaRecordContext { + final String recordId; + + public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, String recordId) { + super(enclosingScope); + this.recordId = recordId; + setCurrentSpan("recordLifetime"); + } + + @Override + public String getRecordId() { + return recordId; + } + } + + public static class TrafficStreamsLifecycleContext + extends IndirectNestedSpanContext + implements IReplayContexts.ITrafficStreamsLifecycleContext { + private final ITrafficStreamKey trafficStreamKey; + + public TrafficStreamsLifecycleContext(IReplayContexts.IKafkaRecordContext enclosingScope, + ITrafficStreamKey trafficStreamKey) { + super(enclosingScope); + this.trafficStreamKey = trafficStreamKey; + setCurrentSpan("trafficStreamLifetime"); + } + + @Override + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + return getLogicalEnclosingScope(); + } + + @Override + public ITrafficStreamKey getTrafficStreamKey() { + return trafficStreamKey; + } + + @Override + public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { + return getImmediateEnclosingScope().getLogicalEnclosingScope(); + } + } + + public static class HttpTransactionContext + extends IndirectNestedSpanContext + implements IReplayContexts.IReplayerHttpTransactionContext { + final UniqueReplayerRequestKey replayerRequestKey; + + public HttpTransactionContext(IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, + UniqueReplayerRequestKey replayerRequestKey) { + super(enclosingScope); + this.replayerRequestKey = replayerRequestKey; + setCurrentSpan("httpTransaction"); + } + + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + return getLogicalEnclosingScope(); + } + + @Override + public UniqueReplayerRequestKey getReplayerRequestKey() { + return replayerRequestKey; + } + + @Override + public String toString() { + return replayerRequestKey.toString(); + } + + @Override + public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { + return getImmediateEnclosingScope().getLogicalEnclosingScope(); + } + } + + public static class RequestAccumulationContext + extends DirectNestedSpanContext + implements IReplayContexts.IRequestAccumulationContext { + public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("accumulatingRequest"); + } + } + + public static class 
ResponseAccumulationContext + extends DirectNestedSpanContext + implements IReplayContexts.IResponseAccumulationContext { + public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("accumulatingResponse"); + } + } + + public static class RequestTransformationContext + extends DirectNestedSpanContext + implements IReplayContexts.IRequestTransformationContext { + public RequestTransformationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("transformation"); + } + } + + public static class ScheduledContext + extends DirectNestedSpanContext + implements IReplayContexts.IScheduledContext { + public ScheduledContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("scheduled"); + } + } + + public static class TargetRequestContext + extends DirectNestedSpanContext + implements IReplayContexts.ITargetRequestContext { + public TargetRequestContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("targetTransaction"); + } + } + + public static class RequestSendingContext + extends DirectNestedSpanContext + implements IReplayContexts.IRequestSendingContext { + public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("requestSending"); + } + } + + public static class WaitingForHttpResponseContext + extends DirectNestedSpanContext + implements IReplayContexts.IWaitingForHttpResponseContext { + public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("waitingForResponse"); + } + } + + public static class ReceivingHttpResponseContext + extends DirectNestedSpanContext + implements IReplayContexts.IReceivingHttpResponseContext { + public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("receivingRequest"); + } + } + + public static class TupleHandlingContext + extends DirectNestedSpanContext + implements IReplayContexts.ITupleHandlingContext { + public TupleHandlingContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("tupleHandling"); + } + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java new file mode 100644 index 000000000..dbded0b0b --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -0,0 +1,38 @@ +package org.opensearch.migrations.replay.tracing; + +import lombok.NonNull; +import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; + +public class TrafficSourceContexts { + + public static class ReadChunkContext + extends DirectNestedSpanContext + implements ITrafficSourceContexts.IReadChunkContext + { + public ReadChunkContext(T enclosingScope) { + super(enclosingScope); + setCurrentSpan("readNextTrafficStreamChunk"); + } + } + + public static class BackPressureBlockContext + extends DirectNestedSpanContext + implements ITrafficSourceContexts.IBackPressureBlockContext + { + public 
BackPressureBlockContext(@NonNull ITrafficSourceContexts.IReadChunkContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("backPressureBlock"); + } + } + + public static class WaitForNextSignal + extends DirectNestedSpanContext + implements ITrafficSourceContexts.IReadChunkContext { + public WaitForNextSignal(@NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { + super(enclosingScope); + setCurrentSpan("waitForNextBackPressureCheck"); + } + } + +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 0b8213383..5edbd3ac1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -1,11 +1,11 @@ package org.opensearch.migrations.replay.traffic.source; import com.google.protobuf.Timestamp; -import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; +import org.opensearch.migrations.replay.tracing.TrafficSourceContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.slf4j.event.Level; @@ -36,7 +36,6 @@ */ @Slf4j public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlowController { - private static final String TELEMETRY_SCOPE_NAME = "BlockingTrafficSource"; private final ISimpleTrafficCaptureSource underlyingSource; private final AtomicReference lastTimestampSecondsRef; @@ -47,28 +46,6 @@ public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlo private final Semaphore readGate; private final Duration bufferTimeWindow; - public static class ReadChunkContext - extends DirectNestedSpanContext { - public ReadChunkContext(T enclosingScope) { - super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "readNextTrafficStreamChunk"); - } - } - - public static class BackPressureBlockContext extends DirectNestedSpanContext { - public BackPressureBlockContext(@NonNull ReadChunkContext enclosingScope) { - super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "backPressureBlock"); - } - } - - public static class WaitForNextSignal extends DirectNestedSpanContext { - public WaitForNextSignal(@NonNull BackPressureBlockContext enclosingScope) { - super(enclosingScope); - setCurrentSpan(TELEMETRY_SCOPE_NAME, "waitForNextBackPressureCheck"); - } - } - public BlockingTrafficSource(ISimpleTrafficCaptureSource underlying, Duration bufferTimeWindow) { this.underlyingSource = underlying; this.stopReadingAtRef = new AtomicReference<>(Instant.EPOCH); @@ -114,7 +91,7 @@ public Duration getBufferTimeWindow() { @Override public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { - var readContext = new ReadChunkContext(context); + var readContext = new TrafficSourceContexts.ReadChunkContext(context); log.info("BlockingTrafficSource::readNext"); var trafficStreamListFuture = CompletableFuture .supplyAsync(() -> blockIfNeeded(readContext), 
task -> new Thread(task).start()) @@ -140,14 +117,14 @@ public Duration getBufferTimeWindow() { }); } - private Void blockIfNeeded(ReadChunkContext readContext) { + private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) { if (stopReadingAtRef.get().equals(Instant.EPOCH)) { return null; } log.atInfo().setMessage(() -> "stopReadingAtRef=" + stopReadingAtRef + " lastTimestampSecondsRef=" + lastTimestampSecondsRef).log(); - BackPressureBlockContext blockContext = null; + ITrafficSourceContexts.IBackPressureBlockContext blockContext = null; while (stopReadingAtRef.get().isBefore(lastTimestampSecondsRef.get())) { if (blockContext == null) { - blockContext = new BackPressureBlockContext(readContext); + blockContext = new TrafficSourceContexts.BackPressureBlockContext(readContext); } try { log.atInfo().setMessage("blocking until signaled to read the next chunk last={} stop={}") @@ -157,7 +134,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { var nextTouchOp = underlyingSource.getNextRequiredTouch(); if (nextTouchOp.isEmpty()) { log.trace("acquiring readGate semaphore (w/out timeout)"); - try (var waitContext = new WaitForNextSignal(blockContext)) { + try (var waitContext = new TrafficSourceContexts.WaitForNextSignal(blockContext)) { readGate.acquire(); } } else { @@ -172,7 +149,7 @@ private Void blockIfNeeded(ReadChunkContext readContext) { // if this doesn't succeed, we'll loop around & likely do a touch, then loop around again. // if it DOES succeed, we'll loop around and make sure that there's not another reason to stop log.atTrace().setMessage(() -> "acquring readGate semaphore with timeout=" + waitIntervalMs).log(); - try (var waitContext = new WaitForNextSignal(blockContext)) { + try (var waitContext = new TrafficSourceContexts.WaitForNextSignal(blockContext)) { readGate.tryAcquire(waitIntervalMs, TimeUnit.MILLISECONDS); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 70a1a61b7..fe37de8bd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -9,8 +9,7 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -34,20 +33,21 @@ public InputStreamOfTraffic(IInstrumentationAttributes context, InputStream inpu } private static class IOSTrafficStreamContext - extends DirectNestedSpanContext - implements IContexts.ITrafficStreamsLifecycleContext { + extends DirectNestedSpanContext + implements IReplayContexts.ITrafficStreamsLifecycleContext { @Getter private final ITrafficStreamKey trafficStreamKey; - public IOSTrafficStreamContext(@NonNull IChannelKeyContext ctx, ITrafficStreamKey tsk) { + public IOSTrafficStreamContext(@NonNull 
IReplayContexts.IChannelKeyContext ctx, ITrafficStreamKey tsk) { super(ctx); this.trafficStreamKey = tsk; - setCurrentSpan(TELEMETRY_SCOPE_NAME, "trafficStreamLifecycle"); + setCurrentSpan("trafficStreamLifecycle"); } @Override - public IChannelKeyContext getChannelKeyContext() { + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getImmediateEnclosingScope(); } + @Override public String getScopeName() { return TELEMETRY_SCOPE_NAME; } } /** diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index a59665956..44ea0c7d5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -14,14 +14,13 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; @@ -184,7 +183,7 @@ private static class TrafficStreamCursorKey implements ITrafficStreamKey, Compar public final String connectionId; public final String nodeId; public final int trafficStreamIndex; - @Getter public final IContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; + @Getter public final IReplayContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; public TrafficStreamCursorKey(TrafficStream stream, int arrayIndex) { connectionId = stream.getConnectionId(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index b504fde56..8a9dd0c82 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -16,10 +16,8 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import 
org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.RootOtelContext; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -218,14 +216,14 @@ void generateAndTest(String testName, int bufferSize, int skipCount, new AccumulationCallbacks() { @Override public void onRequestReceived(UniqueReplayerRequestKey key, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, HttpMessageAndTimestamp request) { requestsReceived.incrementAndGet(); } @Override public void onFullDataReceived(UniqueReplayerRequestKey requestKey, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, RequestResponsePacketPair fullPair) { var sourceIdx = requestKey.getSourceRequestIndex(); if (fullPair.completionStatus == @@ -246,19 +244,19 @@ public void onFullDataReceived(UniqueReplayerRequestKey requestKey, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IChannelKeyContext ctx) { + IReplayContexts.IChannelKeyContext ctx) { tsIndicesReceived.add(tsk.getTrafficStreamIndex()); } }); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index a5a7d5c7e..b05104fe6 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -9,8 +9,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.TestContext; @@ -160,7 +159,7 @@ public void testReader() throws Exception { new AccumulationCallbacks() { @Override public void onRequestReceived(UniqueReplayerRequestKey id, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); @@ -169,7 +168,7 @@ public void onRequestReceived(UniqueReplayerRequestKey id, @Override public void onFullDataReceived(UniqueReplayerRequestKey key, - 
IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); @@ -177,19 +176,19 @@ public void onFullDataReceived(UniqueReplayerRequestKey key, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IChannelKeyContext ctx) {} + IReplayContexts.IChannelKeyContext ctx) {} }); var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); @@ -215,7 +214,7 @@ public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { new AccumulationCallbacks() { @Override public void onRequestReceived(UniqueReplayerRequestKey id, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); @@ -224,7 +223,7 @@ public void onRequestReceived(UniqueReplayerRequestKey id, @Override public void onFullDataReceived(UniqueReplayerRequestKey key, - IContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); @@ -232,17 +231,17 @@ public void onFullDataReceived(UniqueReplayerRequestKey key, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) {} @Override public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IChannelKeyContext ctx) {} + IReplayContexts.IChannelKeyContext ctx) {} } ); byte[] serializedChunks; diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 797fb077b..e62b37245 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,6 +1,6 @@ package org.opensearch.migrations.replay; -import org.opensearch.migrations.replay.tracing.Contexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import 
org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; @@ -9,20 +9,18 @@ public class TestRequestKey { public static final String TEST_NODE_ID = "testNodeId"; public static final String DEFAULT_TEST_CONNECTION = "testConnection"; - private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("test"); - private TestRequestKey() {} - public static final Contexts.HttpTransactionContext getTestConnectionRequestContext(int replayerIdx) { + public static final ReplayContexts.HttpTransactionContext getTestConnectionRequestContext(int replayerIdx) { return getTestConnectionRequestContext(DEFAULT_TEST_CONNECTION, replayerIdx); } - public static Contexts.HttpTransactionContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { + public static ReplayContexts.HttpTransactionContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { var rk = new UniqueReplayerRequestKey( PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, tsk -> new TestTrafficStreamsLifecycleContext(tsk)), 0, replayerIdx); - return new Contexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk); + return new ReplayContexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index 119060b52..745490f8b 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -1,28 +1,28 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.ChannelKeyContext; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.replay.tracing.IChannelKeyContext; -import org.opensearch.migrations.replay.tracing.IContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.RootOtelContext; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; class TestTrafficStreamsLifecycleContext - extends DirectNestedSpanContext - implements IContexts.ITrafficStreamsLifecycleContext { - private static final SimpleMeteringClosure METERING_CLOSURE = new SimpleMeteringClosure("test"); + extends DirectNestedSpanContext + implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; public TestTrafficStreamsLifecycleContext(ITrafficStreamKey tsk) { - super(new ChannelKeyContext(new RootOtelContext(), tsk)); + super(new ReplayContexts.ChannelKeyContext(new RootOtelContext(), tsk)); this.trafficStreamKey = tsk; - setCurrentSpan("testScope","testSpan"); + setCurrentSpan("testSpan"); } @Override - public IChannelKeyContext getChannelKeyContext() { + public String getScopeName() { return "testScope"; } + + @Override + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return 
getLogicalEnclosingScope(); } From 613a50468b317d8e003684f6fb0e07b469041829 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 22 Dec 2023 20:28:29 -0500 Subject: [PATCH 37/94] Refactor TestContext so that it doesn't use statics and allows callers to bring it up with or w/out InMemory exporters. Signed-off-by: Greg Schohn --- TrafficCapture/coreUtilities/build.gradle | 2 +- .../migrations/tracing/TestContext.java | 58 ++++++++++++++++--- .../replay/tracing/ReplayContexts.java | 2 +- .../replay/BlockingTrafficSourceTest.java | 6 +- .../replay/FullTrafficReplayerTest.java | 12 ++-- .../KafkaRestartingTrafficReplayerTest.java | 6 +- .../replay/TrafficReplayerRunner.java | 2 +- .../replay/TrafficReplayerTest.java | 12 ++-- .../KafkaCommitsWorkBetweenLongPolls.java | 33 +---------- .../replay/kafka/KafkaKeepAliveTests.java | 10 ++-- ...KafkaTrafficCaptureSourceLongTermTest.java | 6 +- .../kafka/KafkaTrafficCaptureSourceTest.java | 10 ++-- 12 files changed, 88 insertions(+), 71 deletions(-) diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index 16ed03c38..0fd1e6171 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -70,7 +70,7 @@ dependencies { testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' - testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk' + testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk-testing' } configurations.all { diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index 8e61c3b6c..e09a1d4fc 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -1,11 +1,60 @@ package org.opensearch.migrations.tracing; +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.trace.Span; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.export.SpanExporter; import lombok.Getter; +import java.time.Duration; +import java.util.Optional; + public class TestContext implements IScopedInstrumentationAttributes { - public static final TestContext singleton = new TestContext(); + @Getter + public IInstrumentConstructor rootInstrumentationScope = new RootOtelContext(); + @Getter + public Span currentSpan; + @Getter + public final InMemorySpanExporter testSpanExporter; + @Getter + public final InMemoryMetricExporter testMetricExporter; + + public static TestContext withTracking() { + return new TestContext(InMemorySpanExporter.create(), InMemoryMetricExporter.create()); + } + public static TestContext noTracking() { + return new TestContext(null, null); + } + + public 
TestContext(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter testMetricExporter) { + this.testSpanExporter = testSpanExporter; + this.testMetricExporter = testMetricExporter; + + var otelBuilder = OpenTelemetrySdk.builder(); + if (testSpanExporter != null) { + otelBuilder = otelBuilder.setTracerProvider(SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(testSpanExporter)).build()); + } + if (testMetricExporter != null) { + otelBuilder = otelBuilder.setMeterProvider(SdkMeterProvider.builder() + .registerMetricReader(PeriodicMetricReader.builder(testMetricExporter) + .setInterval(Duration.ofMillis(100)) + .build()) + .build()); + } + var openTel = otelBuilder.build(); + currentSpan = new RootOtelContext(openTel) + .buildSpanWithoutParent("testScope", "testSpan"); + } @Override public String getScopeName() { return "TestContext"; @@ -16,11 +65,4 @@ public IInstrumentationAttributes getEnclosingScope() { return null; } - @Getter public IInstrumentConstructor rootInstrumentationScope = new RootOtelContext(); - - @Getter - public Span currentSpan; - public TestContext() { - currentSpan = new RootOtelContext().buildSpanWithoutParent("testScope", "testSpan"); - } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 6b5814692..08d996d2e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -32,7 +32,7 @@ public String toString() { @Override public String getScopeName() { return "Connection"; } } - + public static class KafkaRecordContext extends DirectNestedSpanContext implements IReplayContexts.IKafkaRecordContext { final String recordId; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 1f23a4bc1..4bd59027f 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -46,7 +46,7 @@ void readNextChunkTest() throws Exception { blockingSource.stopReadsPast(sourceStartTime.plus(Duration.ofMillis(0))); var firstChunk = new ArrayList(); for (int i = 0; i<=BUFFER_MILLIS+SHIFT; ++i) { - var nextPieceFuture = blockingSource.readNextTrafficStreamChunk(TestContext.singleton); + var nextPieceFuture = blockingSource.readNextTrafficStreamChunk(TestContext.noTracking()); nextPieceFuture.get(500000, TimeUnit.MILLISECONDS) .forEach(ts->firstChunk.add(ts)); } @@ -54,7 +54,7 @@ void readNextChunkTest() throws Exception { Assertions.assertTrue(BUFFER_MILLIS+SHIFT <= firstChunk.size()); Instant lastTime = null; for (int i =SHIFT; iTestHttpServerContext.makeResponse(random, response)); @@ -105,7 +107,7 @@ public void testDoubleRequestWithCloseIsCommittedOnce() throws Throwable { var fixedTimestamp = Timestamp.newBuilder().setSeconds(baseTime.getEpochSecond()).setNanos(baseTime.getNano()).build(); var tsb = TrafficStream.newBuilder().setConnectionId("C"); - for (int i=0; i<2; ++i) { + for (int i=0; i new SentinelSensingTrafficSource( - new 
KafkaTrafficCaptureSource(TestContext.singleton, buildKafkaConsumer(), TEST_TOPIC_NAME, + new KafkaTrafficCaptureSource(TestContext.noTracking(), buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)))); log.info("done"); } @@ -169,7 +169,7 @@ Producer buildKafkaProducer() { try { for (int i = 0; i < recordCount; ++i) { List chunks = null; - chunks = originalTrafficSource.readNextTrafficStreamChunk(TestContext.singleton).get(); + chunks = originalTrafficSource.readNextTrafficStreamChunk(TestContext.noTracking()).get(); for (int j = 0; j < chunks.size(); ++j) { KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, chunks.get(j).getStream(), TEST_TOPIC_NAME, "KEY_" + i + "_" + j); @@ -180,7 +180,7 @@ Producer buildKafkaProducer() { throw Lombok.sneakyThrow(e); } }); - return () -> new KafkaTrafficCaptureSource(TestContext.singleton, kafkaConsumer, TEST_TOPIC_NAME, + return () -> new KafkaTrafficCaptureSource(TestContext.noTracking(), kafkaConsumer, TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index 8f1dd500c..60b7fdf6c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -141,7 +141,7 @@ private static void runTrafficReplayer(Supplier cap URI endpoint, Consumer tupleReceiver) throws Exception { log.info("Starting a new replayer and running it"); - var tr = new TrafficReplayer(TestContext.singleton, endpoint, null, + var tr = new TrafficReplayer(TestContext.noTracking(), endpoint, null, new StaticAuthTransformerFactory("TEST"), null, true, 10, 10*1024); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index b05104fe6..d9f9c144c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -115,9 +115,9 @@ public void testDelimitedDeserializer() throws Exception { try (var bais = new ByteArrayInputStream(serializedChunks)) { AtomicInteger counter = new AtomicInteger(0); var allMatch = new AtomicBoolean(true); - try (var trafficProducer = new InputStreamOfTraffic(TestContext.singleton, bais)) { + try (var trafficProducer = new InputStreamOfTraffic(TestContext.noTracking(), bais)) { while (true) { - trafficProducer.readNextTrafficStreamChunk(TestContext.singleton).get().stream() + trafficProducer.readNextTrafficStreamChunk(TestContext.noTracking()).get().stream() .forEach(ts->{ var i = counter.incrementAndGet(); var expectedStream = makeTrafficStream(timestamp.plus(i - 1, ChronoUnit.SECONDS), i); @@ -151,7 +151,7 @@ static byte[] synthesizeTrafficStreamsIntoByteArray(Instant timestamp, int numSt @Test public void testReader() throws Exception { - var tr = new TrafficReplayer(TestContext.singleton, + var tr = new TrafficReplayer(TestContext.noTracking(), new URI("http://localhost:9200"), null, null, false); List> byteArrays = new ArrayList<>(); CapturedTrafficToHttpTransactionAccumulator trafficAccumulator = @@ -193,7 
+193,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); try (var bais = new ByteArrayInputStream(bytes)) { - try (var trafficSource = new InputStreamOfTraffic(TestContext.singleton, bais)) { + try (var trafficSource = new InputStreamOfTraffic(TestContext.noTracking(), bais)) { tr.pullCaptureFromSourceToAccumulator(trafficSource, trafficAccumulator); } } @@ -203,7 +203,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel @Test public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { - var tr = new TrafficReplayer(TestContext.singleton, + var tr = new TrafficReplayer(TestContext.noTracking(), new URI("http://localhost:9200"), null, null, false); List> byteArrays = new ArrayList<>(); var remainingAccumulations = new AtomicInteger(); @@ -262,7 +262,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel } try (var bais = new ByteArrayInputStream(serializedChunks)) { - try (var trafficSource = new InputStreamOfTraffic(TestContext.singleton, bais)) { + try (var trafficSource = new InputStreamOfTraffic(TestContext.noTracking(), bais)) { tr.pullCaptureFromSourceToAccumulator(trafficSource, trafficAccumulator); } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index b547c592a..d9c36bc9a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -43,33 +43,6 @@ public class KafkaCommitsWorkBetweenLongPolls { private final KafkaContainer embeddedKafkaBroker = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0")); - private InMemorySpanExporter testSpanExporter; - private InMemoryMetricExporter testMetricExporter; - - @BeforeEach - void setup() { - GlobalOpenTelemetry.resetForTest(); - testSpanExporter = InMemorySpanExporter.create(); - testMetricExporter = InMemoryMetricExporter.create(); - - OpenTelemetrySdk.builder() - .setTracerProvider( - SdkTracerProvider.builder() - .addSpanProcessor(SimpleSpanProcessor.create(testSpanExporter)).build()) - .setMeterProvider( - SdkMeterProvider.builder() - .registerMetricReader(PeriodicMetricReader.builder(testMetricExporter) - .setInterval(Duration.ofMillis(100)) - .build()) - .build()) - .buildAndRegisterGlobal(); - } - - @AfterEach - void tearDown() { - GlobalOpenTelemetry.resetForTest(); - } - @SneakyThrows private KafkaConsumer buildKafkaConsumer() { var kafkaConsumerProps = KafkaTrafficCaptureSource.buildKafkaProperties(embeddedKafkaBroker.getBootstrapServers(), @@ -83,7 +56,7 @@ private KafkaConsumer buildKafkaConsumer() { @Test @Tag("longTest") public void testThatCommitsAndReadsKeepWorking() throws Exception { - var kafkaSource = new KafkaTrafficCaptureSource(TestContext.singleton, buildKafkaConsumer(), + var kafkaSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS/3)); var blockingSource = new BlockingTrafficSource(kafkaSource, Duration.ofMinutes(5)); var kafkaProducer = 
KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); @@ -103,7 +76,7 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { var ts = chunks.get(0); Thread.sleep(DEFAULT_POLL_INTERVAL_MS*2); log.info("committing "+ts.getKey()); - blockingSource.commitTrafficStream(TestContext.singleton, ts.getKey()); + blockingSource.commitTrafficStream(TestContext.noTracking(), ts.getKey()); blockingSource.stopReadsPast(getTimeAtPoint(i)); } } catch (Exception e) { @@ -113,7 +86,7 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { for (int i=0; i(kafkaProperties); - this.kafkaSource = new KafkaTrafficCaptureSource(TestContext.singleton, + this.kafkaSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); this.trafficSource = new BlockingTrafficSource(kafkaSource, Duration.ZERO); this.keysReceived = new ArrayList<>(); @@ -88,7 +88,7 @@ public void testTimeoutsDontOccurForSlowPolls() throws Exception { try { var k = keysReceived.get(0); log.info("Calling commit traffic stream for "+k); - trafficSource.commitTrafficStream(TestContext.singleton, k); + trafficSource.commitTrafficStream(TestContext.noTracking(), k); log.info("finished committing traffic stream"); log.info("Stop reads to infinity"); // this is a way to signal back to the main thread that this thread is done @@ -114,7 +114,7 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti } readNextNStreams(trafficSource, keysReceived, 1, 1); - trafficSource.commitTrafficStream(TestContext.singleton, keysReceived.get(0)); + trafficSource.commitTrafficStream(TestContext.noTracking(), keysReceived.get(0)); log.info("Called commitTrafficStream but waiting long enough for the client to leave the group. " + "That will make the previous commit a 'zombie-commit' that should easily be dropped."); @@ -137,7 +137,7 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti keysReceived = new ArrayList<>(); log.atInfo().setMessage(()->"re-establish... 3 ..."+renderNextCommitsAsString()).log(); readNextNStreams(trafficSource, keysReceived, 0, 1); - trafficSource.commitTrafficStream(TestContext.singleton, keysReceivedUntilDrop1.get(1)); + trafficSource.commitTrafficStream(TestContext.noTracking(), keysReceivedUntilDrop1.get(1)); log.atInfo().setMessage(()->"re-establish... 
4 ..."+renderNextCommitsAsString()).log(); readNextNStreams(trafficSource, keysReceived, 1, 1); log.atInfo().setMessage(()->"5 ..."+renderNextCommitsAsString()).log(); @@ -159,7 +159,7 @@ private static void readNextNStreams(BlockingTrafficSource kafkaSource, List{ var tsk = ts.getKey(); log.atInfo().setMessage(()->"checking for "+tsk).log(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index feb28d7cf..f0022d1d4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -42,7 +42,7 @@ public void testTrafficCaptureSource() throws Exception { final long MAX_POLL_MS = 10000; kafkaConsumerProps.setProperty(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY, MAX_POLL_MS+""); var kafkaConsumer = new KafkaConsumer(kafkaConsumerProps); - var kafkaTrafficCaptureSource = new KafkaTrafficCaptureSource(TestContext.singleton, + var kafkaTrafficCaptureSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_MS)); var kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); @@ -60,7 +60,7 @@ public void testTrafficCaptureSource() throws Exception { for (int i=0; i { - var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(TestContext.singleton) + var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(TestContext.noTracking()) .get(1, TimeUnit.SECONDS); if (rogueChunk.isEmpty()) { // TimeoutExceptions cannot be thrown by the supplier of the CompletableFuture today, BUT we diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index 108202cfa..bc79d201a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -45,7 +45,7 @@ public void testRecordToString() { .setNodeId("n") .setNumber(7) .build(); - var contextFactory = new ChannelContextManager(TestContext.singleton); + var contextFactory = new ChannelContextManager(TestContext.noTracking()); var tsk = new TrafficStreamKeyWithKafkaRecordId(contextFactory, ts, "testRecord", 1, 2, 123); Assertions.assertEquals("n.c.7|partition=2|offset=123", tsk.toString()); } @@ -54,7 +54,7 @@ public void testRecordToString() { public void testSupplyTrafficFromSource() { int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); - KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.singleton, + KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.noTracking(), mockConsumer, TEST_TOPIC_NAME, Duration.ofHours(1)); initializeMockConsumerTopic(mockConsumer); @@ -74,7 +74,7 @@ public void testSupplyTrafficFromSource() { var tsCount = new AtomicInteger(); 
Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { - protobufConsumer.readNextTrafficStreamChunk(TestContext.singleton).get().stream() + protobufConsumer.readNextTrafficStreamChunk(TestContext.noTracking()).get().stream() .forEach(streamWithKey -> { tsCount.incrementAndGet(); log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); @@ -96,7 +96,7 @@ public void testSupplyTrafficFromSource() { public void testSupplyTrafficWithUnformattedMessages() { int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); - KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.singleton, + KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.noTracking(), mockConsumer, TEST_TOPIC_NAME, Duration.ofHours(1)); initializeMockConsumerTopic(mockConsumer); @@ -126,7 +126,7 @@ public void testSupplyTrafficWithUnformattedMessages() { var tsCount = new AtomicInteger(); Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { - protobufConsumer.readNextTrafficStreamChunk(TestContext.singleton).get().stream() + protobufConsumer.readNextTrafficStreamChunk(TestContext.noTracking()).get().stream() .forEach(streamWithKey->{ tsCount.incrementAndGet(); log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); From 7df0dcc4717c8ad8a6227f578bba174dd3b7d2ad Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 22 Dec 2023 20:46:03 -0500 Subject: [PATCH 38/94] Remove a hardcoded path to my local directory Signed-off-by: Greg Schohn --- .../dockerSolution/src/main/docker/docker-compose.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index 18a2c9f89..e588aa0c9 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -41,7 +41,6 @@ services: volumes: # - ./otel-collector-config-demo.yaml:/etc/otel-collector-config-demo.yaml - ./otel-collector-config-demo.yaml:/etc/otelcol/config.yaml - - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs ports: - "1888:1888" # pprof extension - "8888:8888" # Prometheus metrics exposed by the collector @@ -88,7 +87,6 @@ services: - migrations volumes: - sharedReplayerOutput:/shared-replayer-output - - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs environment: - TUPLE_DIR_PATH=/shared-replayer-output/traffic-replayer-default depends_on: @@ -138,7 +136,6 @@ services: # - "13133:13133" # volumes: # - ./otelcol/otel-config.yml:/etc/otel-config.yml -# - /Users/schohn/dev/opensearch-migrations/TrafficCapture/containerLogs:/logs # networks: # - migrations # depends_on: From de0d482276ab1e9fca018a04c5c898e820e70d49 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 22 Dec 2023 22:53:40 -0500 Subject: [PATCH 39/94] Fixed bugs in trace management and forced a lot more test code to take a context into it so that span verifications can happen within unit tests. The FullTrafficReplayerTest now has a test that is verifying the number of spans that were reported. That was useful to work through to fix the bugs in double counting some spans.
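As a rough illustration of the span-verification pattern this commit enables (a hypothetical sketch, not part of any patch in this series; the test class name and the span names mentioned in the comments are assumptions), a unit test can hand the code under test a TestContext built with withTracking() and then inspect the finished spans captured by the in-memory exporter:

    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;
    import org.opensearch.migrations.tracing.TestContext;

    class SpanCountingSketchTest {
        @Test
        void spanExporterStartsEmptyAndCollectsFinishedSpans() {
            // withTracking() wires InMemorySpanExporter/InMemoryMetricExporter into the SDK
            var rootContext = TestContext.withTracking();
            // nothing has been exercised yet, so no spans have been exported
            Assertions.assertTrue(rootContext.getTestSpanExporter().getFinishedSpanItems().isEmpty());
            // after driving replayer code with rootContext, spans such as "httpTransaction" or
            // "trafficStreamLifetime" would appear here and could be grouped and counted by name
        }
    }

Tests that do not care about telemetry can pass TestContext.noTracking() instead, which skips registering the in-memory exporters.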
Signed-off-by: Greg Schohn --- .../migrations/tracing/TestContext.java | 6 +-- .../build.gradle | 1 + .../replay/AddCompressionEncodingTest.java | 3 +- .../replay/PayloadRepackingTest.java | 7 +-- .../replay/RequestSenderOrchestrator.java | 19 +++---- .../http/HttpJsonTransformingConsumer.java | 3 +- .../replay/tracing/IReplayContexts.java | 28 +++++----- .../replay/BlockingTrafficSourceTest.java | 3 +- ...afficToHttpTransactionAccumulatorTest.java | 6 ++- ...xpiringTrafficStreamMapSequentialTest.java | 9 ++-- ...ExpiringTrafficStreamMapUnorderedTest.java | 9 ++-- .../replay/FullTrafficReplayerTest.java | 54 +++++++++++++++++-- .../replay/HeaderTransformerTest.java | 11 ++-- .../replay/ParsedHttpMessagesAsDictsTest.java | 4 +- .../replay/RequestSenderOrchestratorTest.java | 6 ++- .../replay/ResultsToLogsConsumerTest.java | 12 +++-- .../SigV4SigningTransformationTest.java | 3 +- ...afficToHttpTransactionAccumulatorTest.java | 15 ++++-- .../NettyPacketToHttpConsumerTest.java | 7 ++- .../HttpJsonTransformingConsumerTest.java | 15 +++--- .../migrations/replay/TestRequestKey.java | 11 ++-- .../TestTrafficStreamsLifecycleContext.java | 13 +++-- .../migrations/replay/TestUtils.java | 15 +++--- 23 files changed, 174 insertions(+), 86 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index e09a1d4fc..aa071bb2a 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -19,7 +19,7 @@ public class TestContext implements IScopedInstrumentationAttributes { @Getter - public IInstrumentConstructor rootInstrumentationScope = new RootOtelContext(); + public IInstrumentConstructor rootInstrumentationScope; @Getter public Span currentSpan; @Getter @@ -52,8 +52,8 @@ public TestContext(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter .build()); } var openTel = otelBuilder.build(); - currentSpan = new RootOtelContext(openTel) - .buildSpanWithoutParent("testScope", "testSpan"); + rootInstrumentationScope = new RootOtelContext(openTel); + currentSpan = null; } @Override public String getScopeName() { diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle index c1b9fbbda..814c9e2b8 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/build.gradle @@ -15,6 +15,7 @@ dependencies { testImplementation project(':replayerPlugins:jsonMessageTransformers:jsonJoltMessageTransformer') testImplementation project(':coreUtilities') testImplementation project(':trafficReplayer') + testImplementation testFixtures(project(path: ':coreUtilities')) testImplementation testFixtures(project(path: ':testUtilities')) testImplementation testFixtures(project(path: ':trafficReplayer')) diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java 
b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java index 3bb3c936b..89d1b82fb 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java @@ -6,6 +6,7 @@ import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.JsonJoltTransformBuilder; import org.opensearch.migrations.transform.JsonJoltTransformer; @@ -34,7 +35,7 @@ public void addingCompressionRequestHeaderCompressesPayload() throws ExecutionEx JsonJoltTransformer.newBuilder() .addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP) .build(), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(TestContext.noTracking(), 0)); final var payloadPartSize = 511; final var numParts = 1025; diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java index 8ba273ce1..8a06232b6 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java @@ -9,6 +9,7 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.JsonJoltTransformBuilder; import org.opensearch.migrations.transform.JsonJoltTransformer; @@ -59,8 +60,8 @@ public void testSimplePayloadTransform(boolean doGzip, boolean doChunked) throws expectedRequestHeaders.add("host", "localhost"); expectedRequestHeaders.add("Content-Length", "46"); - TestUtils.runPipelineAndValidate(transformerBuilder.build(), null,null, - stringParts, expectedRequestHeaders, + TestUtils.runPipelineAndValidate(TestContext.noTracking(), transformerBuilder.build(), null, + null, stringParts, expectedRequestHeaders, referenceStringBuilder -> TestUtils.resolveReferenceString(referenceStringBuilder)); } @@ -104,7 +105,7 @@ public void testJsonPayloadTransformation() throws Exception { expectedRequestHeaders.add("content-type", "application/json; charset=UTF-8"); expectedRequestHeaders.add("Content-Length", "55"); - TestUtils.runPipelineAndValidate(transformerBuilder.build(), null, + TestUtils.runPipelineAndValidate(TestContext.noTracking(), transformerBuilder.build(), null, extraHeaders, List.of(jsonPayload), expectedRequestHeaders, x -> 
"{\"top\":[{\"Name\":\"A\",\"Value\":1},{\"Name\":\"B\",\"Value\":2}]}"); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index 373283dbe..bbfd8be7e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -213,14 +213,10 @@ private void scheduleSendOnConnectionReplaySession(IReplayContexts.IReplayerHttp var eventLoop = channelFutureAndRequestSchedule.eventLoop; var packetReceiverRef = new AtomicReference(); Runnable packetSender = () -> { - try (var targetContext = new ReplayContexts.TargetRequestContext(ctx); - var requestContext = new ReplayContexts.RequestSendingContext(targetContext)) { - sendNextPartAndContinue(() -> - memoizePacketConsumer(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), - packetReceiverRef), - eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture, - targetContext, requestContext); - } + sendNextPartAndContinue(() -> + memoizePacketConsumer(ctx, channelFutureAndRequestSchedule.getInnerChannelFuture(), + packetReceiverRef), + eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); }; scheduleOnConnectionReplaySession(ctx.getLogicalEnclosingScope(), ctx.getReplayerRequestKey().getSourceRequestIndex(), @@ -264,9 +260,7 @@ private long getDelayFromNowMs(Instant to) { private void sendNextPartAndContinue(Supplier packetHandlerSupplier, EventLoop eventLoop, Iterator iterator, Instant start, Duration interval, AtomicInteger counter, - StringTrackableCompletableFuture responseFuture, - ReplayContexts.TargetRequestContext targetContext, - ReplayContexts.RequestSendingContext requestContext) { + StringTrackableCompletableFuture responseFuture) { log.atTrace().setMessage(()->"sendNextPartAndContinue: counter=" + counter.get()).log(); var packetReceiver = packetHandlerSupplier.get(); assert iterator.hasNext() : "Should not have called this with no items to send"; @@ -275,13 +269,12 @@ private void sendNextPartAndContinue(Supplier packetH if (iterator.hasNext()) { counter.incrementAndGet(); Runnable packetSender = () -> sendNextPartAndContinue(packetHandlerSupplier, eventLoop, - iterator, start, interval, counter, responseFuture, targetContext, requestContext); + iterator, start, interval, counter, responseFuture); var delayMs = Duration.between(now(), start.plus(interval.multipliedBy(counter.get()))).toMillis(); eventLoop.schedule(packetSender, Math.min(0, delayMs), TimeUnit.MILLISECONDS); } else { packetReceiver.finalizeRequest().handle((v,t)-> { - targetContext.close(); if (t != null) { responseFuture.future.completeExceptionally(t); } else { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 877885421..65bfc82e3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -186,7 +186,8 @@ private 
static Throwable unwindPossibleCompletionException(Throwable t) { .setAttribute(MetricsAttributeKey.CHANNEL_ID, channel.id().asLongText()).emit(); return finalizedFuture.map(f->f.thenApply(r->reason == null ? new TransformedOutputAndResult(r, HttpRequestTransformationStatus.SKIPPED, null) : - new TransformedOutputAndResult(r, HttpRequestTransformationStatus.ERROR, reason)), + new TransformedOutputAndResult(r, HttpRequestTransformationStatus.ERROR, reason)) + .whenComplete((v,t)->transformationContext.close()), ()->"HttpJsonTransformingConsumer.redriveWithoutTransformation().map()"); } } \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 30e5b7724..57957fe3e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -20,6 +20,20 @@ public class IReplayContexts { public static final String REQUEST_SENDER_SCOPE = "RequestSender"; public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; + public interface IChannelKeyContext extends IConnectionContext { + // do not add this as a property + // because its components are already being added in the IConnectionContext implementation + ISourceTrafficChannelKey getChannelKey(); + + default String getConnectionId() { + return getChannelKey().getConnectionId(); + } + + default String getNodeId() { + return getChannelKey().getNodeId(); + } + } + public interface IKafkaRecordContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); @@ -113,18 +127,4 @@ public interface ITupleHandlingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { default String getScopeName() { return TRAFFIC_REPLAYER_SCOPE; } } - - public static interface IChannelKeyContext extends IConnectionContext { - // do not add this as a property - // because its components are already being added in the IConnectionContext implementation - ISourceTrafficChannelKey getChannelKey(); - - default String getConnectionId() { - return getChannelKey().getConnectionId(); - } - - default String getNodeId() { - return getChannelKey().getNodeId(); - } - } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 4bd59027f..c7680e3dc 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -101,7 +101,8 @@ private static class TestTrafficCaptureSource implements ISimpleTrafficCaptureSo .setClose(CloseObservation.getDefaultInstance()) .build()) .build(); - var key = PojoTrafficStreamKeyAndContext.build(ts, TestTrafficStreamsLifecycleContext::new); + var key = PojoTrafficStreamKeyAndContext.build(ts, + tsk->new TestTrafficStreamsLifecycleContext(context, tsk)); return CompletableFuture.completedFuture(List.of(new PojoTrafficStreamAndKey(ts, key))); } diff --git 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java index 8294aec17..d3fdfa504 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -7,6 +7,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -83,6 +84,7 @@ public void testAccumulatedSplit(String testName, int cutPoint, void accumulateWithAccumulatorPairAtPoint(TrafficStream[] trafficStreams, int cutPoint, int[] expectedRequestSizes, int[] expectedResponseSizes) { + var ctx = TestContext.noTracking(); List reconstructedTransactions = new ArrayList<>(); AtomicInteger requestsReceived = new AtomicInteger(0); // some of the messages up to the cutPoint may not have been able to be fully committed (when the @@ -92,11 +94,11 @@ void accumulateWithAccumulatorPairAtPoint(TrafficStream[] trafficStreams, int cu // // Notice that this may cause duplicates. That's by design. The system has an at-least-once guarantee. var indicesProcessedPass1 = - SimpleCapturedTrafficToHttpTransactionAccumulatorTest.accumulateTrafficStreamsWithNewAccumulator( + SimpleCapturedTrafficToHttpTransactionAccumulatorTest.accumulateTrafficStreamsWithNewAccumulator(ctx, Arrays.stream(trafficStreams).limit(cutPoint), reconstructedTransactions, requestsReceived); cutPoint = indicesProcessedPass1.isEmpty() ? 0 : indicesProcessedPass1.last(); var indicesProcessedPass2 = - SimpleCapturedTrafficToHttpTransactionAccumulatorTest.accumulateTrafficStreamsWithNewAccumulator( + SimpleCapturedTrafficToHttpTransactionAccumulatorTest.accumulateTrafficStreamsWithNewAccumulator(ctx, Arrays.stream(trafficStreams).skip(cutPoint), reconstructedTransactions, requestsReceived); // three checks to do w/ the indicesProcessed sets. 
diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java index 4d1f7c1b4..3413e68c5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java @@ -5,6 +5,7 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; +import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; import java.time.Duration; @@ -20,6 +21,7 @@ class ExpiringTrafficStreamMapSequentialTest { public static void testLinearExpirations(Function connectionGenerator, int window, int granularity, int expectedExpirationCounts[]) { + var context = TestContext.noTracking(); var expiredAccumulations = new ArrayList(); var expiringMap = new ExpiringTrafficStreamMap(Duration.ofSeconds(window), Duration.ofSeconds(granularity), new BehavioralPolicy() { @@ -34,14 +36,15 @@ public void onExpireAccumulation(String partitionId, for (int i=0; inew TestTrafficStreamsLifecycleContext(context, k)); var accumulation = expiringMap.getOrCreateWithoutExpiration(tsk, k->new Accumulation(tsk, 0)); createdAccumulations.add(accumulation); expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, - connectionGenerator.apply(i), 0, TestTrafficStreamsLifecycleContext::new), + connectionGenerator.apply(i), 0, k->new TestTrafficStreamsLifecycleContext(context, k)), accumulation, ts); var rrPair = createdAccumulations.get(i).getOrCreateTransactionPair( - PojoTrafficStreamKeyAndContext.build("n","c",1, TestTrafficStreamsLifecycleContext::new)); + PojoTrafficStreamKeyAndContext.build("n","c",1, + k->new TestTrafficStreamsLifecycleContext(context, k))); rrPair.addResponseData(ts, ("Add"+i).getBytes(StandardCharsets.UTF_8)); expiredCountsPerLoop.add(expiredAccumulations.size()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java index bed1fd9fd..09519e283 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java @@ -6,6 +6,7 @@ import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; import java.time.Duration; @@ -23,6 +24,7 @@ class ExpiringTrafficStreamMapUnorderedTest { public void testExpirations(Function connectionGenerator, int window, int granularity, int timestamps[], int expectedExpirationCounts[]) { + var context = TestContext.noTracking(); var expiredAccumulations = new ArrayList(); var expiringMap = new 
ExpiringTrafficStreamMap(Duration.ofSeconds(window), Duration.ofSeconds(granularity), new BehavioralPolicy() { @@ -37,15 +39,16 @@ public void onExpireAccumulation(String partitionId, for (int i=0; inew TestTrafficStreamsLifecycleContext(context, k)); var accumulation = expiringMap.getOrCreateWithoutExpiration(tsk, k->new Accumulation(tsk, 0)); expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, - connectionGenerator.apply(i), 0, TestTrafficStreamsLifecycleContext::new), + connectionGenerator.apply(i), 0, + k->new TestTrafficStreamsLifecycleContext(context, k)), accumulation, ts); createdAccumulations.add(accumulation); if (accumulation != null) { var rrPair = accumulation.getOrCreateTransactionPair(PojoTrafficStreamKeyAndContext.build("n","c",1, - TestTrafficStreamsLifecycleContext::new)); + k->new TestTrafficStreamsLifecycleContext(context, k))); rrPair.addResponseData(ts, ("Add" + i).getBytes(StandardCharsets.UTF_8)); } expiredCountsPerLoop.add(expiredAccumulations.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index adfa9b5ee..f2bf456dc 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -2,6 +2,8 @@ import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.data.SpanData; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; @@ -43,9 +45,12 @@ import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; @Slf4j // It would be great to test with leak detection here, but right now this test relies upon TrafficReplayer.shutdown() @@ -132,7 +137,8 @@ public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) thro .build(); var trafficSource = new ArrayCursorTrafficCaptureSource(new ArrayCursorTrafficSourceFactory(List.of(trafficStream))); - var tr = new TrafficReplayer(TestContext.noTracking(), httpServer.localhostEndpoint(), null, + var trackingContext = TestContext.withTracking(); + var tr = new TrafficReplayer(trackingContext, httpServer.localhostEndpoint(), null, new StaticAuthTransformerFactory("TEST"), null, true, 10, 10*1024); @@ -149,9 +155,49 @@ public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) thro } Assertions.assertEquals(numRequests, tuplesReceived.size()); + checkSpansForSimpleReplayedTransactions(trackingContext.testSpanExporter, numRequests); log.info("done"); } + /** + * This function is written like this rather than with a loop so that the backtrace will show WHICH + * key was corrupted. 
+ */ + private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSpanExporter, int numRequests) { + var byName = testSpanExporter.getFinishedSpanItems().stream().collect(Collectors.groupingBy(SpanData::getName)); + BiConsumer chk = (i, k)-> { + Assertions.assertNotNull(byName.get(k)); + Assertions.assertEquals(i, byName.get(k).size()); + byName.remove(k); + }; + chk.accept(1,"channel"); + chk.accept(1, "testTrafficSpan"); + + chk.accept(numRequests, "accumulatingRequest"); + chk.accept(numRequests, "accumulatingResponse"); + chk.accept(numRequests, "httpTransaction"); + chk.accept(numRequests, "transformation"); + chk.accept(numRequests, "targetTransaction"); + chk.accept(numRequests, "scheduled"); + chk.accept(numRequests, "requestSending"); + chk.accept(numRequests, "waitingForResponse"); + chk.accept(numRequests, "tupleHandling"); + + Consumer chkNonZero = k-> { + Assertions.assertNotNull(byName.get(k)); + Assertions.assertFalse(byName.get(k).isEmpty()); + byName.remove(k); + }; + chkNonZero.accept("readNextTrafficStreamChunk"); + // ideally, we'd be getting these back too, but our requests are malformed, so the server closes, which + // may occur before we've started to accumulate the response. So - just ignore these, but make sure that + // there isn't anything else that we've missed. + byName.remove("receivingRequest"); + + Assertions.assertEquals("", byName.entrySet().stream() + .map(kvp->kvp.getKey()+":"+kvp.getValue()).collect(Collectors.joining())); + } + @ParameterizedTest @CsvSource(value = { "3,false", @@ -187,13 +233,13 @@ private static class TrafficStreamCursorKey implements ITrafficStreamKey, Compar public final int trafficStreamIndex; @Getter public final IReplayContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; - public TrafficStreamCursorKey(TrafficStream stream, int arrayIndex) { + public TrafficStreamCursorKey(IInstrumentationAttributes ctx, TrafficStream stream, int arrayIndex) { connectionId = stream.getConnectionId(); nodeId = stream.getNodeId(); trafficStreamIndex = TrafficStreamUtils.getTrafficStreamIndex(stream); this.arrayIndex = arrayIndex; var key = PojoTrafficStreamKeyAndContext.build(nodeId, connectionId, trafficStreamIndex, tsk-> - new TestTrafficStreamsLifecycleContext(tsk)); + new TestTrafficStreamsLifecycleContext(ctx, tsk)); trafficStreamsContext = key.getTrafficStreamsContext(); key.setTrafficStreamsContext(trafficStreamsContext); } @@ -241,7 +287,7 @@ public CompletableFuture> readNextTrafficStreamChunk return CompletableFuture.failedFuture(new EOFException()); } var stream = arrayCursorTrafficSourceFactory.trafficStreamsList.get(idx); - var key = new TrafficStreamCursorKey(stream, idx); + var key = new TrafficStreamCursorKey(context, stream, idx); synchronized (pQueue) { pQueue.add(key); cursorHighWatermark = idx; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java index 69e8ad8b0..f78130cc1 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java @@ -7,6 +7,7 @@ import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import 
org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.StaticAuthTransformerFactory; import java.time.Duration; @@ -28,13 +29,14 @@ public class HeaderTransformerTest { @Test public void testTransformer() throws Exception { + var context = TestContext.noTracking(); // mock object. values don't matter at all - not what we're testing final var dummyAggregatedResponse = new TransformedTargetRequestAndResponse(null, 17, null, null, HttpRequestTransformationStatus.COMPLETED, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformer = new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME); var transformingHandler = new HttpJsonTransformingConsumer(transformer, null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(context, 0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + "HoSt: " + SOURCE_CLUSTER_NAME + "\r\n" + @@ -78,7 +80,7 @@ private void runRandomPayloadWithTransformer(HttpJsonTransformingConsumer "GET / HTTP/1.1\r\n" + @@ -104,6 +106,7 @@ public void testMalformedPayloadIsPassedThrough() throws Exception { */ @Test public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exception { + var ctx = TestContext.noTracking(); var referenceStringBuilder = new StringBuilder(); // mock object. values don't matter at all - not what we're testing final var dummyAggregatedResponse = new TransformedTargetRequestAndResponse(null, 12, null, @@ -113,7 +116,7 @@ public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exce var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME, null, "[{\"JsonTransformerForOpenSearch23PlusTargetTransformerProvider\":\"\"}]"), - null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); + null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(ctx, 0)); Random r = new Random(2); var stringParts = IntStream.range(0, 1).mapToObj(i-> TestUtils.makeRandomString(r, 10)).map(o->(String)o) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index a048f571d..9331b7cbc 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -5,6 +5,7 @@ import org.opensearch.migrations.replay.datatypes.MockMetricsBuilder; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; +import org.opensearch.migrations.tracing.TestContext; import java.util.Map; import java.util.Optional; @@ -12,7 +13,8 @@ class ParsedHttpMessagesAsDictsTest { private static final PojoTrafficStreamKeyAndContext TEST_TRAFFIC_STREAM_KEY = - PojoTrafficStreamKeyAndContext.build("N","C",1, TestTrafficStreamsLifecycleContext::new); + PojoTrafficStreamKeyAndContext.build("N","C",1, + k->new 
TestTrafficStreamsLifecycleContext(TestContext.noTracking(), k)); ParsedHttpMessagesAsDicts makeTestData() { return makeTestData(null, null); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index 5a45babea..2040f278d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; import java.time.Duration; @@ -30,6 +31,7 @@ class RequestSenderOrchestratorTest { @Test @Tag("longTest") public void testThatSchedulingWorks() throws Exception { + var ctx = TestContext.noTracking(); var httpServer = SimpleHttpServer.makeServer(false, r -> TestHttpServerContext.makeResponse(r, Duration.ofMillis(100))); var testServerUri = httpServer.localhostEndpoint(); @@ -39,7 +41,7 @@ public void testThatSchedulingWorks() throws Exception { Instant lastEndTime = baseTime; var scheduledItems = new ArrayList>(); for (int i = 0; inew TestTrafficStreamsLifecycleContext(context, k)), 0, 0), null, null, null, null, null, null); try (var closeableLogSetup = new CloseableLogSetup()) { var consumer = new TupleParserChainConsumer(null, new ResultsToLogsConsumer()); @@ -92,10 +94,11 @@ public void testOutputterWithNulls() throws IOException { @Test public void testOutputterWithException() throws IOException { + var context = TestContext.noTracking(); var exception = new Exception(TEST_EXCEPTION_MESSAGE); var emptyTuple = new SourceTargetCaptureTuple( new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - TestTrafficStreamsLifecycleContext::new), 0, 0), + k->new TestTrafficStreamsLifecycleContext(context, k)), 0, 0), null, null, null, null, exception, null); try (var closeableLogSetup = new CloseableLogSetup()) { @@ -230,9 +233,10 @@ public void testOutputterForPost() throws IOException { @Test private void testOutputterForRequest(String requestResourceName, String expected) throws IOException { + var context = TestContext.noTracking(); var trafficStreamKey = PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - TestTrafficStreamsLifecycleContext::new); - var requestCtx = TestRequestKey.getTestConnectionRequestContext(0); + k->new TestTrafficStreamsLifecycleContext(context, k)); + var requestCtx = TestRequestKey.getTestConnectionRequestContext(context, 0); trafficStreamKey.setTrafficStreamsContext(requestCtx.getImmediateEnclosingScope()); var sourcePair = new RequestResponsePacketPair(trafficStreamKey, 0, 0); var rawRequestData = loadResourceAsBytes("/requests/raw/" + requestResourceName); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java index 8d7ad7206..85cf9c80f 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java +++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java @@ -6,6 +6,7 @@ import io.netty.util.ResourceLeakDetector; import org.junit.jupiter.api.Test; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; @@ -55,7 +56,7 @@ public void testSignatureProperlyApplied() throws Exception { "fc0e8e9a1f7697f510bfdd4d55b8612df8a0140b4210967efd87ee9cb7104362"); expectedRequestHeaders.add("X-Amz-Date", "19700101T000000Z"); - TestUtils.runPipelineAndValidate( + TestUtils.runPipelineAndValidate(TestContext.noTracking(), msg -> new SigV4Signer(mockCredentialsProvider, "es", "us-east-1", "https", () -> Clock.fixed(Instant.EPOCH, ZoneOffset.UTC)), null, stringParts, expectedRequestHeaders, diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 8a9dd0c82..c304ad717 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -17,7 +17,9 @@ import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.RootOtelContext; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -189,11 +191,12 @@ public static Tuple2 unzipRequestResponseSizes(List collat @MethodSource("loadSimpleCombinations") void generateAndTest(String testName, int bufferSize, int skipCount, List directives, List expectedSizes) throws Exception { + var context = TestContext.noTracking(); var trafficStreams = Arrays.stream(makeTrafficStreams(bufferSize, 0, new AtomicInteger(), directives)).skip(skipCount); List reconstructedTransactions = new ArrayList<>(); AtomicInteger requestsReceived = new AtomicInteger(0); - accumulateTrafficStreamsWithNewAccumulator(trafficStreams, reconstructedTransactions, requestsReceived); + accumulateTrafficStreamsWithNewAccumulator(context, trafficStreams, reconstructedTransactions, requestsReceived); var splitSizes = unzipRequestResponseSizes(expectedSizes); assertReconstructedTransactionsMatchExpectations(reconstructedTransactions, splitSizes._1, splitSizes._2); Assertions.assertEquals(requestsReceived.get(), reconstructedTransactions.size()); @@ -207,9 +210,10 @@ void generateAndTest(String testName, int bufferSize, int skipCount, * @return */ static SortedSet - accumulateTrafficStreamsWithNewAccumulator(Stream trafficStreams, - List aggregations, - AtomicInteger requestsReceived) { + 
accumulateTrafficStreamsWithNewAccumulator(IInstrumentationAttributes context, + Stream trafficStreams, + List aggregations, + AtomicInteger requestsReceived) { var tsIndicesReceived = new TreeSet(); CapturedTrafficToHttpTransactionAccumulator trafficAccumulator = new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, @@ -264,7 +268,8 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel trafficStreams = tsList.stream(); ; trafficStreams.forEach(ts->trafficAccumulator.accept( - new PojoTrafficStreamAndKey(ts, PojoTrafficStreamKeyAndContext.build(ts, TestTrafficStreamsLifecycleContext::new) + new PojoTrafficStreamAndKey(ts, PojoTrafficStreamKeyAndContext.build(ts, + k->new TestTrafficStreamsLifecycleContext(context, k)) ))); trafficAccumulator.close(); return tsIndicesReceived; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index bf7ecbc81..e2392a0f5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -27,6 +27,7 @@ import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import javax.net.ssl.SSLException; import java.io.IOException; @@ -122,6 +123,7 @@ private static SimpleHttpResponse makeContext(HttpFirstLine request) { @ParameterizedTest @ValueSource(booleans = {false, true}) public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Exception { + var ctx = TestContext.noTracking(); for (int i = 0; i < 3; ++i) { var testServer = testServers.get(useTls); var sslContext = !testServer.localhostEndpoint().getScheme().toLowerCase().equals("https") ? null : @@ -130,7 +132,7 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except new NioEventLoopGroup(4, new DefaultThreadFactory("test")), testServer.localhostEndpoint(), sslContext, - TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(ctx, 0)); nphc.consumeBytes((EXPECTED_REQUEST_STRING).getBytes(StandardCharsets.UTF_8)); var aggregatedResponse = nphc.finalizeRequest().get(); var responseBytePackets = aggregatedResponse.getCopyOfPackets(); @@ -147,6 +149,7 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) throws SSLException, ExecutionException, InterruptedException { + var rootCtx = TestContext.noTracking(); var testServer = testServers.get(useTls); var sslContext = !testServer.localhostEndpoint().getScheme().toLowerCase().equals("https") ? 
null : SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); @@ -160,7 +163,7 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) new TestFlowController(), timeShifter); for (int j=0; j<2; ++j) { for (int i = 0; i < 2; ++i) { - var ctx = TestRequestKey.getTestConnectionRequestContext("TEST_"+i, j); + var ctx = TestRequestKey.getTestConnectionRequestContext(rootCtx, "TEST_"+i, j); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, sendingFactory, ctx, Instant.now(), Instant.now(), ctx.getReplayerRequestKey(), ()->Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java index 50c2aaa44..1b0c2b3f5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java @@ -8,6 +8,7 @@ import org.opensearch.migrations.replay.TransformationLoader; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.IJsonTransformer; import org.opensearch.migrations.transform.JsonCompositeTransformer; import org.opensearch.migrations.transform.RemovingAuthTransformerFactory; @@ -21,6 +22,7 @@ class HttpJsonTransformingConsumerTest { @Test public void testPassThroughSinglePacketPost() throws Exception { + var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); @@ -28,7 +30,7 @@ public void testPassThroughSinglePacketPost() throws Exception { new HttpJsonTransformingConsumer(new TransformationLoader() .getTransformerFactoryLoader(null), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(ctx, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -43,13 +45,14 @@ public void testPassThroughSinglePacketPost() throws Exception { @Test public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws Exception { + var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(ctx, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -68,13 +71,14 @@ public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws 
Ex @Test public void testRemoveAuthHeadersWorks() throws Exception { + var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), RemovingAuthTransformerFactory.instance, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(0)); + TestRequestKey.getTestConnectionRequestContext(ctx, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/get_withAuthHeader.txt")) { @@ -94,6 +98,7 @@ public void testRemoveAuthHeadersWorks() throws Exception { @Test public void testPartialBodyThrowsAndIsRedriven() throws Exception { + var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var complexTransformer = new JsonCompositeTransformer(new IJsonTransformer() { @@ -113,7 +118,7 @@ private void walkMaps(Object o) { }); var transformingHandler = new HttpJsonTransformingConsumer(complexTransformer, null, - testPacketCapture, TestRequestKey.getTestConnectionRequestContext(0)); + testPacketCapture, TestRequestKey.getTestConnectionRequestContext(ctx, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -130,6 +135,4 @@ private void walkMaps(Object o) { Assertions.assertInstanceOf(NettyJsonBodyAccumulateHandler.IncompleteJsonBodyException.class, returnedResponse.error); } - - } \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index e62b37245..ad05dcd40 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; @@ -12,14 +13,16 @@ public class TestRequestKey { private TestRequestKey() {} - public static final ReplayContexts.HttpTransactionContext getTestConnectionRequestContext(int replayerIdx) { - return getTestConnectionRequestContext(DEFAULT_TEST_CONNECTION, replayerIdx); + public static final ReplayContexts.HttpTransactionContext + getTestConnectionRequestContext(IInstrumentationAttributes ctx, int replayerIdx) { + return getTestConnectionRequestContext(ctx, DEFAULT_TEST_CONNECTION, replayerIdx); } - public static ReplayContexts.HttpTransactionContext getTestConnectionRequestContext(String connectionId, int replayerIdx) { + public static ReplayContexts.HttpTransactionContext + getTestConnectionRequestContext(IInstrumentationAttributes ctx, String connectionId, int replayerIdx) 
{ var rk = new UniqueReplayerRequestKey( PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, - tsk -> new TestTrafficStreamsLifecycleContext(tsk)), + tsk -> new TestTrafficStreamsLifecycleContext(ctx, tsk)), 0, replayerIdx); return new ReplayContexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index 745490f8b..a61652130 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -4,6 +4,7 @@ import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.RootOtelContext; class TestTrafficStreamsLifecycleContext @@ -12,10 +13,10 @@ class TestTrafficStreamsLifecycleContext private final ITrafficStreamKey trafficStreamKey; - public TestTrafficStreamsLifecycleContext(ITrafficStreamKey tsk) { - super(new ReplayContexts.ChannelKeyContext(new RootOtelContext(), tsk)); + public TestTrafficStreamsLifecycleContext(IInstrumentationAttributes rootContext, ITrafficStreamKey tsk) { + super(new ReplayContexts.ChannelKeyContext(rootContext, tsk)); this.trafficStreamKey = tsk; - setCurrentSpan("testSpan"); + setCurrentSpan("testTrafficSpan"); } @Override @@ -30,4 +31,10 @@ public IReplayContexts.IChannelKeyContext getChannelKeyContext() { public ITrafficStreamKey getTrafficStreamKey() { return trafficStreamKey; } + + @Override + public void endSpan() { + super.endSpan(); + getLogicalEnclosingScope().close(); + } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index 3439bbf8f..077a6b03e 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -14,6 +14,7 @@ import org.opensearch.migrations.replay.datahandlers.IPacketConsumer; import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -79,7 +80,7 @@ static DiagnosticTrackableCompletableFuture chainedWriteHeadersAndD List stringParts, StringBuilder referenceStringAccumulator, IntFunction headersGenerator) { - var contentLength = stringParts.stream().mapToInt(s->s.length()).sum(); + var contentLength = stringParts.stream().mapToInt(String::length).sum(); String headers = headersGenerator.apply(contentLength) + "\r\n"; referenceStringAccumulator.append(headers); return chainedWriteHeadersAndDualWritePayloadParts(packetConsumer, stringParts, 
referenceStringAccumulator, headers); @@ -117,20 +118,22 @@ private static String getStringFromContent(FullHttpRequest fullRequest) throws I try (var baos = new ByteArrayOutputStream()) { var bb = fullRequest.content(); bb.readBytes(baos, bb.readableBytes()); - return new String(baos.toByteArray(), StandardCharsets.UTF_8); + return baos.toString(StandardCharsets.UTF_8); } } - static void runPipelineAndValidate(IAuthTransformerFactory authTransformer, + static void runPipelineAndValidate(IInstrumentationAttributes rootContext, + IAuthTransformerFactory authTransformer, String extraHeaders, List stringParts, DefaultHttpHeaders expectedRequestHeaders, Function expectedOutputGenerator) throws IOException, ExecutionException, InterruptedException { - runPipelineAndValidate(x -> x, + runPipelineAndValidate(rootContext, x -> x, authTransformer, extraHeaders, stringParts, expectedRequestHeaders, expectedOutputGenerator); } - static void runPipelineAndValidate(IJsonTransformer transformer, + static void runPipelineAndValidate(IInstrumentationAttributes rootContext, + IJsonTransformer transformer, IAuthTransformerFactory authTransformer, String extraHeaders, List stringParts, @@ -141,7 +144,7 @@ static void runPipelineAndValidate(IJsonTransformer transformer, var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), new AggregatedRawResponse(-1, Duration.ZERO, new ArrayList<>(), null)); var transformingHandler = new HttpJsonTransformingConsumer<>(transformer, authTransformer, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext("TEST_CONNECTION", 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, "TEST_CONNECTION", 0)); var contentLength = stringParts.stream().mapToInt(String::length).sum(); var headerString = "GET / HTTP/1.1\r\n" + From 72b1ca88a40ec5717f647fe70084d1f84458d2a9 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 22 Dec 2023 23:12:00 -0500 Subject: [PATCH 40/94] Add another scheduled span before the request is sent. I'm not adding one BETWEEN writes yet because that code is too complicated and should be simplified. I'm also not sure that that isn't going to flood the traces for limited value. 
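The hunk below opens a ReplayContexts.ScheduledContext at the moment the transmission is scheduled on the event loop and closes it inside the wrapped ChannelTask just before packetSender runs, so the "scheduled" span's duration reflects how long the send sat waiting to be dispatched. A minimal, self-contained sketch of that open-at-schedule/close-at-run pattern follows; the ScheduledSpan and eventLoop names here are illustrative stand-ins, not the replayer's actual classes.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class ScheduledSpanSketch {
    // Stand-in for a closeable span context (the real code uses ReplayContexts.ScheduledContext).
    static final class ScheduledSpan implements AutoCloseable {
        final long startNanos = System.nanoTime();
        @Override public void close() {
            System.out.println("scheduled wait: " + (System.nanoTime() - startNanos) + " ns");
        }
    }

    public static void main(String[] args) throws InterruptedException {
        ScheduledExecutorService eventLoop = Executors.newSingleThreadScheduledExecutor();
        Runnable packetSender = () -> System.out.println("sending packets");
        ScheduledSpan scheduledSpan = new ScheduledSpan();   // span opens when the work is enqueued
        eventLoop.schedule(() -> {
            scheduledSpan.close();                           // span closes just before the real work runs
            packetSender.run();
        }, 50, TimeUnit.MILLISECONDS);
        eventLoop.shutdown();
        eventLoop.awaitTermination(1, TimeUnit.SECONDS);
    }
}

Since each replayed request now passes through this extra scheduling step, the span-count check in FullTrafficReplayerTest changes in this patch from numRequests to numRequests*2 for the "scheduled" activity.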
Signed-off-by: Greg Schohn --- .../migrations/replay/RequestSenderOrchestrator.java | 6 +++++- .../migrations/replay/FullTrafficReplayerTest.java | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index bbfd8be7e..e03506518 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -218,10 +218,14 @@ private void scheduleSendOnConnectionReplaySession(IReplayContexts.IReplayerHttp packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); }; + var scheduledContext = new ReplayContexts.ScheduledContext(ctx); scheduleOnConnectionReplaySession(ctx.getLogicalEnclosingScope(), ctx.getReplayerRequestKey().getSourceRequestIndex(), channelFutureAndRequestSchedule, responseFuture, start, - new ChannelTask(ChannelTaskType.TRANSMIT, packetSender)); + new ChannelTask(ChannelTaskType.TRANSMIT, ()->{ + scheduledContext.close(); + packetSender.run(); + })); } private void runAfterChannelSetup(ConnectionReplaySession channelFutureAndItsFutureRequests, diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index f2bf456dc..d4fa7ff45 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -178,7 +178,7 @@ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSp chk.accept(numRequests, "httpTransaction"); chk.accept(numRequests, "transformation"); chk.accept(numRequests, "targetTransaction"); - chk.accept(numRequests, "scheduled"); + chk.accept(numRequests*2, "scheduled"); chk.accept(numRequests, "requestSending"); chk.accept(numRequests, "waitingForResponse"); chk.accept(numRequests, "tupleHandling"); From 6043eed58bb8cc361e502d0f3467e61c83e50687 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 26 Dec 2023 17:12:47 -0500 Subject: [PATCH 41/94] Move all span names into virtual interface functions so that they can be used for some metrics too. 
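Concretely, each call such as setCurrentSpan("stream_flush_called") becomes a no-argument setCurrentSpan(): IScopedInstrumentationAttributes gains a getActivityName() method, each context interface supplies it as a default method backed by a shared ActivityNames constant, and AbstractNestedSpanContext builds its span from getScopeName() and getActivityName(). The sketch below shows the shape of that pattern with simplified stand-in types (not the project's actual classes) and how the same name could then also label a metric.

interface ScopedContext extends AutoCloseable {
    String getScopeName();
    String getActivityName();   // previously a string literal passed straight to setCurrentSpan(...)
    @Override void close();
}

abstract class NestedSpanContextSketch implements ScopedContext {
    protected String spanName;

    // The base class derives the span from the virtual methods instead of taking a name argument.
    protected void setCurrentSpan() {
        spanName = getScopeName() + "/" + getActivityName();
        System.out.println("span started: " + spanName);
    }

    @Override
    public void close() {
        // The same activity name can now also label a metric (e.g. a counter or duration histogram).
        System.out.println("span ended: " + spanName + "; bump metric '" + getActivityName() + "'");
    }
}

class ScheduledContextSketch extends NestedSpanContextSketch {
    ScheduledContextSketch() { setCurrentSpan(); }
    @Override public String getScopeName() { return "RequestSender"; }
    @Override public String getActivityName() { return "scheduled"; }
}

class ActivityNameDemo {
    public static void main(String[] args) {
        try (var ctx = new ScheduledContextSketch()) {
            // work happens while the span is open
        }
    }
}

The practical benefit is that every span and metric name lives in one place (the ScopeNames and ActivityNames holders) instead of being scattered as string literals across the context classes.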
Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 5 +- .../tracing/ConnectionContext.java | 3 + .../tracing/AbstractNestedSpanContext.java | 4 +- .../IScopedInstrumentationAttributes.java | 2 + .../migrations/tracing/TestContext.java | 5 +- .../netty/tracing/HttpMessageContext.java | 11 +-- .../replay/kafka/TrackingKafkaConsumer.java | 64 +++------------ .../tracing/IKafkaConsumerContexts.java | 40 ++++++++-- .../replay/tracing/IReplayContexts.java | 79 ++++++++++++++----- .../tracing/ITrafficSourceContexts.java | 29 +++++-- .../replay/tracing/KafkaConsumerContexts.java | 16 ++-- .../replay/tracing/ReplayContexts.java | 26 +++--- .../replay/tracing/TrafficSourceContexts.java | 10 ++- .../traffic/source/InputStreamOfTraffic.java | 5 +- .../TestTrafficStreamsLifecycleContext.java | 5 +- 15 files changed, 178 insertions(+), 126 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 95134f4a9..a4184edfd 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -30,11 +30,14 @@ public KafkaRecordContext(IConnectionContext enclosingScope, String topic, Strin this.topic = topic; this.recordId = recordId; this.recordSize = recordSize; - setCurrentSpan("stream_flush_called"); + setCurrentSpan(); } @Override public String getScopeName() { return "KafkaCapture"; } + @Override + public String getActivityName() { return "stream_flush_called"; } + @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder.put(TOPIC_ATTR, getTopic()) diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index b836665d0..adfa615bc 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -22,6 +22,9 @@ public class ConnectionContext implements IConnectionContext, IWithStartTime { @Getter final IInstrumentConstructor rootInstrumentationScope; + @Override + public String getActivityName() { return "captureConnection"; } + public ConnectionContext(IInstrumentConstructor rootInstrumentationScope, String connectionId, String nodeId) { this.rootInstrumentationScope = rootInstrumentationScope; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java index 241c614ca..8caaa5bff 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java @@ -26,8 +26,8 @@ public IInstrumentationAttributes getEnclosingScope() { public T getImmediateEnclosingScope() { return 
enclosingScope; } - protected void setCurrentSpan(String spanName) { - setCurrentSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), spanName)); + protected void setCurrentSpan() { + setCurrentSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), getActivityName())); } protected void setCurrentSpanWithNoParent(@NonNull ISpanWithParentGenerator spanGenerator) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index bbcb8d9af..8b78111ad 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -5,6 +5,8 @@ public interface IScopedInstrumentationAttributes extends IInstrumentationAttributes, AutoCloseable { + String getActivityName(); + @Override @NonNull Span getCurrentSpan(); diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index aa071bb2a..b36e3d577 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -17,12 +17,10 @@ import java.time.Duration; import java.util.Optional; -public class TestContext implements IScopedInstrumentationAttributes { +public class TestContext implements IInstrumentationAttributes { @Getter public IInstrumentConstructor rootInstrumentationScope; @Getter - public Span currentSpan; - @Getter public final InMemorySpanExporter testSpanExporter; @Getter public final InMemoryMetricExporter testMetricExporter; @@ -53,7 +51,6 @@ public TestContext(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter } var openTel = otelBuilder.build(); rootInstrumentationScope = new RootOtelContext(openTel); - currentSpan = null; } @Override public String getScopeName() { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index 5b47bcd66..b91c2a5a8 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -3,13 +3,9 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.time.Instant; @@ -56,7 +52,12 @@ public 
HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestI this.sourceRequestIndex = sourceRequestIndex; this.startTime = Instant.now(); this.state = state; - setCurrentSpan(getSpanLabelForState(state)); + setCurrentSpan(); + } + + @Override + public String getActivityName() { + return getSpanLabelForState(state); } @Override public String getScopeName() { return "CapturingHttpHandler"; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index c73b6081a..52f3bf20d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -11,8 +11,7 @@ import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.replay.tracing.KafkaConsumerContexts; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.slf4j.event.Level; @@ -69,52 +68,6 @@ public int hashCode() { } } - public static class TouchScopeContext extends DirectNestedSpanContext { - public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { - super(enclosingScope); - setCurrentSpan("touch"); - } - - @Override - public String getScopeName() { - return IReplayContexts.KAFKA_CONSUMER_SCOPE; - } - } - - public static class PollScopeContext extends DirectNestedSpanContext { - public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { - super(enclosingScope); - setCurrentSpan("kafkaPoll"); - } - @Override - public String getScopeName() { - return IReplayContexts.KAFKA_CONSUMER_SCOPE; - } - - } - - public static class CommitScopeContext extends DirectNestedSpanContext { - public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { - super(enclosingScope); - setCurrentSpan("commit"); - } - @Override - public String getScopeName() { - return IReplayContexts.KAFKA_CONSUMER_SCOPE; - } - } - - public static class KafkaCommitScopeContext extends DirectNestedSpanContext { - public KafkaCommitScopeContext(@NonNull CommitScopeContext enclosingScope) { - super(enclosingScope); - setCurrentSpan("kafkaCommit"); - } - @Override - public String getScopeName() { - return IReplayContexts.KAFKA_CONSUMER_SCOPE; - } - } - /** * The keep-alive should already be set to a fraction of the max poll timeout for * the consumer (done outside of this class). 
The keep-alive tells this class how @@ -212,10 +165,10 @@ public Optional getNextRequiredTouch() { } public void touch(IInstrumentationAttributes context) { - try (var touchCtx = new TouchScopeContext(context)) { + try (var touchCtx = new KafkaConsumerContexts.TouchScopeContext(context)) { log.trace("touch() called."); pause(); - try (var pollCtx = new PollScopeContext(touchCtx)) { + try (var pollCtx = new KafkaConsumerContexts.PollScopeContext(touchCtx)) { var records = kafkaConsumer.poll(Duration.ZERO); if (!records.isEmpty()) { throw new IllegalStateException("Expected no entries once the consumer was paused. " + @@ -301,7 +254,7 @@ private Stream applyBuilder(BiFunction records; - try (var pollContext = new PollScopeContext(context)) { + try (var pollContext = new KafkaConsumerContexts.PollScopeContext(context)) { records = kafkaConsumer.poll(keepAliveInterval.dividedBy(POLL_TIMEOUT_KEEP_ALIVE_DIVISOR)); } log.atLevel(records.isEmpty()? Level.TRACE:Level.INFO) @@ -363,12 +316,12 @@ private void addKeyContextForEventualCommit(ITrafficStreamKey streamKey, KafkaCo private void safeCommit(IInstrumentationAttributes incomingContext) { HashMap nextCommitsMapCopy; - CommitScopeContext context = null; + KafkaConsumerContexts.CommitScopeContext context = null; synchronized (commitDataLock) { if (nextSetOfCommitsMap.isEmpty()) { return; } - context = new CommitScopeContext(incomingContext); + context = new KafkaConsumerContexts.CommitScopeContext(incomingContext); nextCommitsMapCopy = new HashMap<>(); nextCommitsMapCopy.putAll(nextSetOfCommitsMap); } @@ -408,11 +361,12 @@ private void safeCommit(IInstrumentationAttributes incomingContext) { } } - private static void safeCommitStatic(CommitScopeContext context, Consumer kafkaConsumer, + private static void safeCommitStatic(KafkaConsumerContexts.CommitScopeContext context, + Consumer kafkaConsumer, HashMap nextCommitsMap) { assert !nextCommitsMap.isEmpty(); log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); - try (var kafkaContext = new KafkaCommitScopeContext(context)) { + try (var kafkaContext = new KafkaConsumerContexts.KafkaCommitScopeContext(context)) { kafkaConsumer.commitSync(nextCommitsMap); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index 2759406a5..be90c201a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -1,16 +1,42 @@ package org.opensearch.migrations.replay.tracing; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface IKafkaConsumerContexts { - interface IKafkaConsumerScope extends IInstrumentationAttributes { + + class ScopeNames { + private ScopeNames() {} + public static final String KAFKA_CONSUMER_SCOPE = "TrackingKafkaConsumer"; + } + + class ActivityNames { + private ActivityNames() {} + public static final String TOUCH = "touch"; + public static final String KAFKA_POLL = "kafkaPoll"; + public static final String COMMIT = "commit"; + public static final String KAFKA_COMMIT = "kafkaCommit"; + } + + interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { + @Override + default String 
getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } + } + interface ITouchScopeContext extends IKafkaCommitScopeContext { @Override - default String getScopeName() { return IReplayContexts.KAFKA_CONSUMER_SCOPE; } + default String getActivityName() { return ActivityNames.TOUCH; } + } + interface IPollScopeContext extends IKafkaConsumerScope { + @Override + default String getActivityName() { return ActivityNames.KAFKA_POLL; } } - interface ITouchScopeContext extends IKafkaCommitScopeContext {} - interface IPollScopeContext extends IKafkaConsumerScope {} - interface ICommitScopeContext extends IKafkaConsumerScope {} + interface ICommitScopeContext extends IKafkaConsumerScope { + @Override + default String getActivityName() { return ActivityNames.COMMIT; } + } - interface IKafkaCommitScopeContext extends IKafkaConsumerScope {} + interface IKafkaCommitScopeContext extends IKafkaConsumerScope { + @Override + default String getActivityName() { return ActivityNames.KAFKA_COMMIT; } + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 57957fe3e..22a2dfa45 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -12,15 +12,39 @@ public class IReplayContexts { - public static final String KAFKA_RECORD_SCOPE = "KafkaRecord"; - public static final String TRAFFIC_STREAM_LIFETIME_SCOPE = "TrafficStreamLifetime"; - public static final String ACCUMULATOR_SCOPE = "Accumulator"; - public static final String KAFKA_CONSUMER_SCOPE = "TrackingKafkaConsumer"; - public static final String HTTP_TRANSFORMER_SCOPE = "HttpTransformer"; - public static final String REQUEST_SENDER_SCOPE = "RequestSender"; - public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; + public static class ScopeNames { + private ScopeNames() {} + + public static final String KAFKA_RECORD_SCOPE = "KafkaRecord"; + public static final String TRAFFIC_STREAM_LIFETIME_SCOPE = "TrafficStreamLifetime"; + public static final String ACCUMULATOR_SCOPE = "Accumulator"; + public static final String HTTP_TRANSFORMER_SCOPE = "HttpTransformer"; + public static final String REQUEST_SENDER_SCOPE = "RequestSender"; + public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; + } + + public static class ActivityNames { + private ActivityNames() {} + + public static final String CHANNEL = "channel"; + public static final String RECORD_LIFETIME = "recordLifetime"; + public static final String TRAFFIC_STREAM_LIFETIME = "trafficStreamLifetime"; + public static final String HTTP_TRANSACTION = "httpTransaction"; + public static final String ACCUMULATING_REQUEST = "accumulatingRequest"; + public static final String ACCUMULATING_RESPONSE = "accumulatingResponse"; + public static final String TRANSFORMATION = "transformation"; + public static final String SCHEDULED = "scheduled"; + public static final String TARGET_TRANSACTION = "targetTransaction"; + public static final String REQUEST_SENDING = "requestSending"; + public static final String RECEIVING_REQUEST = "receivingRequest"; + public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; + public static final String TUPLE_HANDLING = "tupleHandling"; + } public interface IChannelKeyContext extends 
IConnectionContext { + @Override + default String getActivityName() { return ActivityNames.CHANNEL; } + // do not add this as a property // because its components are already being added in the IConnectionContext implementation ISourceTrafficChannelKey getChannelKey(); @@ -35,12 +59,14 @@ default String getNodeId() { } public interface IKafkaRecordContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { @Override + default String getActivityName() { return ActivityNames.RECORD_LIFETIME; } + static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); String getRecordId(); - default String getScopeName() { return KAFKA_RECORD_SCOPE; } + default String getScopeName() { return ScopeNames.KAFKA_RECORD_SCOPE; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { return IScopedInstrumentationAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); } @@ -48,9 +74,10 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { public interface ITrafficStreamsLifecycleContext extends IChannelKeyContext, IWithTypedEnclosingScope { + default String getActivityName() { return ActivityNames.TRAFFIC_STREAM_LIFETIME; } ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); - default String getScopeName() { return TRAFFIC_STREAM_LIFETIME_SCOPE; } + default String getScopeName() { return ScopeNames.TRAFFIC_STREAM_LIFETIME_SCOPE; } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -60,10 +87,12 @@ public interface IReplayerHttpTransactionContext extends IHttpTransactionContext, IChannelKeyContext, IWithTypedEnclosingScope { static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); + default String getActivityName() { return ActivityNames.HTTP_TRANSACTION; } + UniqueReplayerRequestKey getReplayerRequestKey(); IChannelKeyContext getChannelKeyContext(); - @Override default String getScopeName() { return ACCUMULATOR_SCOPE; } + @Override default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -85,46 +114,56 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { public interface IRequestAccumulationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return ACCUMULATOR_SCOPE; } + default String getActivityName() { return ActivityNames.ACCUMULATING_REQUEST; } + + default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } } public interface IResponseAccumulationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return ACCUMULATOR_SCOPE; } + default String getActivityName() { return ActivityNames.ACCUMULATING_RESPONSE; } + default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } } public interface IRequestTransformationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return HTTP_TRANSFORMER_SCOPE; } + default String getActivityName() { return ActivityNames.TRANSFORMATION; } + default String getScopeName() { return ScopeNames.HTTP_TRANSFORMER_SCOPE; } } public interface IScheduledContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return 
REQUEST_SENDER_SCOPE; } + default String getActivityName() { return ActivityNames.SCHEDULED; } + default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface ITargetRequestContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return REQUEST_SENDER_SCOPE; } + default String getActivityName() { return ActivityNames.TARGET_TRANSACTION; } + default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IRequestSendingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return REQUEST_SENDER_SCOPE; } + default String getActivityName() { return ActivityNames.REQUEST_SENDING; } + default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IWaitingForHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return REQUEST_SENDER_SCOPE; } + default String getActivityName() { return ActivityNames.WAITING_FOR_RESPONSE; } + default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IReceivingHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return REQUEST_SENDER_SCOPE; } + default String getActivityName() { return ActivityNames.RECEIVING_REQUEST; } + default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface ITupleHandlingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getScopeName() { return TRAFFIC_REPLAYER_SCOPE; } + default String getActivityName() { return ActivityNames.TUPLE_HANDLING; } + default String getScopeName() { return ScopeNames.TRAFFIC_REPLAYER_SCOPE; } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java index eaef35eea..717b22137 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -1,16 +1,33 @@ package org.opensearch.migrations.replay.tracing; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface ITrafficSourceContexts { - String TELEMETRY_SCOPE_NAME = "BlockingTrafficSource"; + class ScopeNames { + private ScopeNames() {} + public static final String TRAFFIC_SCOPE = "BlockingTrafficSource"; + } + + class ActivityNames { + private ActivityNames() {} + public static final String READ_NEXT_TRAFFIC_CHUNK = "readNextTrafficStreamChunk"; + public static final String BACK_PRESSURE_BLOCK = "backPressureBlock"; + public static final String WAIT_FOR_NEXT_BACK_PRESSURE_CHECK = "waitForNextBackPressureCheck"; + } interface ITrafficSourceContext extends IScopedInstrumentationAttributes { @Override - default String getScopeName() { return TELEMETRY_SCOPE_NAME; } + default String getScopeName() { return ScopeNames.TRAFFIC_SCOPE; } + } + interface IReadChunkContext extends ITrafficSourceContext { + @Override + default String getActivityName() { return ActivityNames.READ_NEXT_TRAFFIC_CHUNK; } + } + interface IBackPressureBlockContext extends 
ITrafficSourceContext { + @Override + default String getActivityName() { return ActivityNames.BACK_PRESSURE_BLOCK; } + } + interface IWaitForNextSignal extends ITrafficSourceContext { + default String getActivityName() { return ActivityNames.WAIT_FOR_NEXT_BACK_PRESSURE_CHECK; } } - interface IReadChunkContext extends ITrafficSourceContext {} - interface IBackPressureBlockContext extends ITrafficSourceContext {} - interface IWaitForNextSignal extends ITrafficSourceContext {} } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 289412ae9..dd319a881 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -6,12 +6,15 @@ import org.opensearch.migrations.tracing.IInstrumentationAttributes; public class KafkaConsumerContexts { + + private KafkaConsumerContexts() {} + public static class TouchScopeContext extends DirectNestedSpanContext implements IKafkaConsumerContexts.ITouchScopeContext { public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan("touch"); + setCurrentSpan(); } } @@ -19,7 +22,7 @@ public static class PollScopeContext extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IKafkaConsumerContexts.IKafkaCommitScopeContext { - public KafkaCommitScopeContext(@NonNull TrackingKafkaConsumer.CommitScopeContext enclosingScope) { + + public KafkaCommitScopeContext(@NonNull KafkaConsumerContexts.CommitScopeContext enclosingScope) { super(enclosingScope); - setCurrentSpan("kafkaCommit"); + setCurrentSpan(); } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 08d996d2e..ef25e3092 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -22,7 +22,7 @@ public static class ChannelKeyContext extends AbstractNestedSpanContext extends DirectNestedSpanContext implements ITrafficSourceContexts.IReadChunkContext { public ReadChunkContext(T enclosingScope) { super(enclosingScope); - setCurrentSpan("readNextTrafficStreamChunk"); + setCurrentSpan(); } } @@ -22,16 +24,16 @@ public static class BackPressureBlockContext { public BackPressureBlockContext(@NonNull ITrafficSourceContexts.IReadChunkContext enclosingScope) { super(enclosingScope); - setCurrentSpan("backPressureBlock"); + setCurrentSpan(); } } public static class WaitForNextSignal extends DirectNestedSpanContext - implements ITrafficSourceContexts.IReadChunkContext { + implements ITrafficSourceContexts.IWaitForNextSignal { public WaitForNextSignal(@NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { super(enclosingScope); - setCurrentSpan("waitForNextBackPressureCheck"); + setCurrentSpan(); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java 
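A note on the naming pattern used throughout the tracing interfaces above: every scope name and activity name now lives in a nested holder class with a private constructor, and the context interfaces supply them through default getScopeName()/getActivityName() methods, so the concrete *Contexts classes only need the no-argument setCurrentSpan(). A minimal sketch of a new context following that pattern (the names below are illustrative, not part of this patch):

    import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes;

    public interface IExampleContexts {
        class ScopeNames {
            private ScopeNames() {}
            public static final String EXAMPLE_SCOPE = "ExampleComponent";   // hypothetical scope
        }
        class ActivityNames {
            private ActivityNames() {}
            public static final String DO_WORK = "doWork";                   // hypothetical activity
        }
        interface IExampleScope extends IScopedInstrumentationAttributes {
            @Override
            default String getScopeName() { return ScopeNames.EXAMPLE_SCOPE; }
        }
        interface IDoWorkContext extends IExampleScope {
            @Override
            default String getActivityName() { return ActivityNames.DO_WORK; }
        }
    }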
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index fe37de8bd..72b999044 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -40,9 +40,12 @@ private static class IOSTrafficStreamContext public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, ITrafficStreamKey tsk) { super(ctx); this.trafficStreamKey = tsk; - setCurrentSpan("trafficStreamLifecycle"); + setCurrentSpan(); } + @Override + public String getActivityName() { return "trafficStreamLifecycle"; } + @Override public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getImmediateEnclosingScope(); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index a61652130..d8e2da3cf 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -5,7 +5,6 @@ import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.RootOtelContext; class TestTrafficStreamsLifecycleContext extends DirectNestedSpanContext @@ -16,11 +15,13 @@ class TestTrafficStreamsLifecycleContext public TestTrafficStreamsLifecycleContext(IInstrumentationAttributes rootContext, ITrafficStreamKey tsk) { super(new ReplayContexts.ChannelKeyContext(rootContext, tsk)); this.trafficStreamKey = tsk; - setCurrentSpan("testTrafficSpan"); + setCurrentSpan(); } @Override public String getScopeName() { return "testScope"; } + @Override + public String getActivityName() { return "testTrafficSpan"; } @Override public IReplayContexts.IChannelKeyContext getChannelKeyContext() { From 37b99eb5b8b36136cf89cfafd4a6e890bd2972f4 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 2 Jan 2024 00:35:24 -0500 Subject: [PATCH 42/94] Pass more contexts, make contexts able to express more metrics, and emit more. 
Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 6 +- .../tracing/AbstractNestedSpanContext.java | 15 +- .../IScopedInstrumentationAttributes.java | 13 ++ .../tracing/IWithTypedEnclosingScope.java | 2 +- ...nditionallyReliableLoggingHttpHandler.java | 2 +- .../netty/LoggingHttpHandler.java | 6 +- .../netty/tracing/HttpMessageContext.java | 3 +- .../migrations/replay/Accumulation.java | 5 +- ...edTrafficToHttpTransactionAccumulator.java | 20 +-- .../replay/ClientConnectionPool.java | 2 +- .../replay/RequestResponsePacketPair.java | 4 +- .../replay/RequestSenderOrchestrator.java | 8 +- .../migrations/replay/TrafficReplayer.java | 4 +- .../NettyPacketToHttpConsumer.java | 62 +++++--- .../http/HttpJsonTransformingConsumer.java | 10 +- ...dHttpRequestPreliminaryConvertHandler.java | 21 ++- .../http/NettyJsonBodyAccumulateHandler.java | 7 +- .../http/RequestPipelineOrchestrator.java | 20 ++- .../http/helpers/LastHttpContentListener.java | 22 +++ .../http/helpers/ReadMeteringingHandler.java | 24 ++++ .../http/helpers/WriteMeteringHandler.java | 27 ++++ .../kafka/KafkaTrafficCaptureSource.java | 9 +- .../replay/kafka/TrackingKafkaConsumer.java | 2 + .../TrafficStreamKeyWithKafkaRecordId.java | 13 +- .../replay/netty/BacksideSnifferHandler.java | 10 +- .../tracing/IKafkaConsumerContexts.java | 16 +++ .../replay/tracing/IReplayContexts.java | 71 +++++++++- .../replay/tracing/KafkaConsumerContexts.java | 41 +++++- .../replay/tracing/ReplayContexts.java | 134 +++++++++++++++--- .../replay/tracing/TrafficSourceContexts.java | 6 +- .../traffic/source/BlockingTrafficSource.java | 4 +- .../traffic/source/InputStreamOfTraffic.java | 2 +- ...xpiringTrafficStreamMapSequentialTest.java | 2 +- ...ExpiringTrafficStreamMapUnorderedTest.java | 2 +- .../replay/ResultsToLogsConsumerTest.java | 3 +- .../kafka/KafkaTrafficCaptureSourceTest.java | 9 +- .../migrations/replay/TestRequestKey.java | 5 +- .../TestTrafficStreamsLifecycleContext.java | 6 +- 38 files changed, 474 insertions(+), 144 deletions(-) create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/LastHttpContentListener.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index a4184edfd..559d0f877 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -2,12 +2,8 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.trace.Span; -import lombok.AllArgsConstructor; import lombok.Getter; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; import 
org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithStartTime; @@ -30,7 +26,7 @@ public KafkaRecordContext(IConnectionContext enclosingScope, String topic, Strin this.topic = topic; this.recordId = recordId; this.recordSize = recordSize; - setCurrentSpan(); + initializeSpan(); } @Override public String getScopeName() { return "KafkaCapture"; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java index 8caaa5bff..7ed3ea46b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java @@ -26,21 +26,12 @@ public IInstrumentationAttributes getEnclosingScope() { public T getImmediateEnclosingScope() { return enclosingScope; } - protected void setCurrentSpan() { - setCurrentSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), getActivityName())); + protected void initializeSpan() { + initializeSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), getActivityName())); } - protected void setCurrentSpanWithNoParent(@NonNull ISpanWithParentGenerator spanGenerator) { - assert enclosingScope == null; - setCurrentSpan(spanGenerator.apply(getPopulatedAttributes(), null)); - } - - protected void setCurrentSpan(@NonNull Span s) { + public void initializeSpan(@NonNull Span s) { assert currentSpan == null : "only expect to set the current span once"; currentSpan = s; } - - public void close() { - endSpan(); - } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 8b78111ad..42444cfeb 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -14,7 +14,20 @@ default void endSpan() { getCurrentSpan().end(); } + default String getEndOfScopeMetricName() { + return getActivityName() + "Count"; + } + default String getEndOfScopeDurationMetricName() { + return getActivityName() + "Duration"; + } + + default void sendMeterEventsForEnd() { + meterIncrementEvent(getEndOfScopeMetricName()); + meterHistogramMicros(getEndOfScopeDurationMetricName()); + } + default void close() { endSpan(); + sendMeterEventsForEnd(); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java index 4cad5302a..1802e9649 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java @@ -1,5 +1,5 @@ package org.opensearch.migrations.tracing; -public interface IWithTypedEnclosingScope { +public interface IWithTypedEnclosingScope extends IInstrumentationAttributes { T getLogicalEnclosingScope(); } diff 
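Worth calling out from the IScopedInstrumentationAttributes change above: close() now ends the span and also emits an "<activityName>Count" increment plus an "<activityName>Duration" histogram, which is why call sites elsewhere in this patch switch from endSpan() to close(). The intended call pattern is ordinary try-with-resources; a hedged sketch, where SomeActivityContext stands in for any of the concrete span-context classes:

    // Sketch only; SomeActivityContext is a placeholder for a DirectNestedSpanContext subclass.
    try (var activityCtx = new SomeActivityContext(enclosingContext)) {
        doTheWork();                                     // span stays open for the whole block
        activityCtx.meterIncrementEvent("itemsHandled"); // ad-hoc meters still work as before
    }   // close(): ends the span, then emits "<activity>Count" and "<activity>Duration"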
--git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index b33c2966f..76c7c3b0b 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -39,7 +39,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob messageContext.meterIncrementEvent(t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); messageContext.meterHistogramMicros( t==null ? "blockedFlushFailure_micro" : "stream_flush_failure_micro"); - messageContext.endSpan(); + messageContext.endSpan(); // TODO - make this meter on create/close if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 70184db44..2c2d7409a 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -219,7 +219,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (messageContext.getState() == HttpMessageContext.HttpTransactionState.RESPONSE) { - messageContext.endSpan(); + messageContext.endSpan(); // TODO - make this meter on create/close rotateNextMessageContext(HttpMessageContext.HttpTransactionState.REQUEST); } var timestamp = Instant.now(); @@ -247,7 +247,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (requestParsingHandler.haveParsedFullRequest) { - messageContext.endSpan(); + messageContext.endSpan(); // TODO - make this meter on create/close var httpRequest = requestParsingHandler.resetCurrentRequest(); captureState.liveReadObservationsInOffloader = false; captureState.advanceStateModelIntoResponseGather(); @@ -270,7 +270,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { if (messageContext.getState() != HttpMessageContext.HttpTransactionState.RESPONSE) { - messageContext.endSpan(); + messageContext.endSpan(); // TODO - make this meter on create/close rotateNextMessageContext(HttpMessageContext.HttpTransactionState.RESPONSE); } var bb = (ByteBuf) msg; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index b91c2a5a8..65e13c8d5 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ 
b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -1,6 +1,5 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; -import io.opentelemetry.api.trace.Span; import lombok.Getter; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; @@ -52,7 +51,7 @@ public HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestI this.sourceRequestIndex = sourceRequestIndex; this.startTime = Instant.now(); this.state = state; - setCurrentSpan(); + initializeSpan(); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index f0e4cd9c3..99cf9d531 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -47,11 +47,12 @@ public Accumulation(@NonNull ITrafficStreamKey trafficChannelKey, dropObservationsLeftoverFromPrevious ? State.IGNORING_LAST_REQUEST : State.WAITING_FOR_NEXT_READ_CHUNK; } - public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey forTrafficStreamKey) { + public RequestResponsePacketPair getOrCreateTransactionPair(ITrafficStreamKey forTrafficStreamKey, + Instant originTimestamp) { if (rrPair != null) { return rrPair; } - this.rrPair = new RequestResponsePacketPair(forTrafficStreamKey, + this.rrPair = new RequestResponsePacketPair(forTrafficStreamKey, originTimestamp, startingSourceRequestIndex, getIndexOfCurrentRequest()); //this.rrPair.getRequestContext() return rrPair; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index e47989c9a..09c29baa6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -104,14 +104,14 @@ private static class SpanWrappingAccumulationCallbacks { private final AccumulationCallbacks underlying; public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { - requestCtx.endSpan(); + requestCtx.close(); underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), requestCtx.getLogicalEnclosingScope(), request); } public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @NonNull RequestResponsePacketPair rrpp) { - rrpp.getResponseContext().endSpan(); + rrpp.getResponseContext().close(); underlying.onFullDataReceived(key, rrpp.getHttpTransactionContext(), rrpp); } @@ -121,7 +121,8 @@ public void onConnectionClose(@NonNull Accumulation accum, @NonNull List trafficStreamKeysBeingHeld) { var tsCtx = accum.trafficChannelKey.getTrafficStreamsContext(); underlying.onConnectionClose(accum.trafficChannelKey, - accum.numberOfResets.get(), tsCtx, status, when, trafficStreamKeysBeingHeld); + accum.numberOfResets.get(), tsCtx.getLogicalEnclosingScope(), + status, when, trafficStreamKeysBeingHeld); } public void 
onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, @@ -132,7 +133,7 @@ public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStat public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) { var tsCtx = tsk.getTrafficStreamsContext(); - underlying.onTrafficStreamIgnored(tsk, tsk.getTrafficStreamsContext()); + underlying.onTrafficStreamIgnored(tsk, tsk.getTrafficStreamsContext().getLogicalEnclosingScope()); } }; @@ -259,8 +260,10 @@ private static List getTrafficStreamsHeldByAccum(Accumulation TrafficObservation observation, @NonNull ITrafficStreamKey trafficStreamKey, Instant timestamp) { + var originTimestamp = TrafficStreamUtils.instantFromProtoTimestamp(observation.getTs()); if (observation.hasClose()) { - accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); + accum.getOrCreateTransactionPair(trafficStreamKey, originTimestamp) + .holdTrafficStream(trafficStreamKey); var heldTrafficStreams = getTrafficStreamsHeldByAccum(accum); if (rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum)) { heldTrafficStreams = List.of(); @@ -270,7 +273,7 @@ private static List getTrafficStreamsHeldByAccum(Accumulation timestamp, heldTrafficStreams); return Optional.of(CONNECTION_STATUS.CLOSED); } else if (observation.hasConnectionException()) { - accum.getOrCreateTransactionPair(trafficStreamKey).holdTrafficStream(trafficStreamKey); + accum.getOrCreateTransactionPair(trafficStreamKey, originTimestamp).holdTrafficStream(trafficStreamKey); rotateAccumulationIfNecessary(trafficStreamKey.getConnectionId(), accum); exceptionConnectionCounter.incrementAndGet(); accum.resetForNextRequest(); @@ -291,11 +294,12 @@ private Optional handleObservationForReadState(@NonNull Accum } var connectionId = trafficStreamKey.getConnectionId(); + var originTimestamp = TrafficStreamUtils.instantFromProtoTimestamp(observation.getTs()); if (observation.hasRead()) { if (!accum.hasRrPair()) { requestCounter.incrementAndGet(); } - var rrPair = accum.getOrCreateTransactionPair(trafficStreamKey); + var rrPair = accum.getOrCreateTransactionPair(trafficStreamKey, originTimestamp); log.atTrace().setMessage(() -> "Adding request data for accum[" + connectionId + "]=" + accum).log(); rrPair.addRequestData(timestamp, observation.getRead().getData().toByteArray()); log.atTrace().setMessage(() -> "Added request data for accum[" + connectionId + "]=" + accum).log(); @@ -304,7 +308,7 @@ private Optional handleObservationForReadState(@NonNull Accum handleEndOfRequest(accum); } else if (observation.hasReadSegment()) { log.atTrace().setMessage(()->"Adding request segment for accum[" + connectionId + "]=" + accum).log(); - var rrPair = accum.getOrCreateTransactionPair(trafficStreamKey); + var rrPair = accum.getOrCreateTransactionPair(trafficStreamKey, originTimestamp); if (rrPair.requestData == null) { rrPair.requestData = new HttpMessageAndTimestamp.Request(timestamp); requestCounter.incrementAndGet(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index 5656dbdf3..8f147a082 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -15,7 +15,6 @@ import lombok.SneakyThrows; import 
lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; @@ -178,6 +177,7 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte .thenAccept(cf-> { cf.channel().close() .addListener(closeFuture -> { + channelAndFutureWork.getChannelContext().onTargetConnectionClosed(); if (closeFuture.isSuccess()) { channelClosedFuture.future.complete(channelAndFutureWork.getInnerChannelFuture().channel()); } else { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 3298ca2bd..1866eae3b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -35,14 +35,14 @@ public enum ReconstructionStatus { // or just leave this null, in which case, the context from the trafficStreamKey should be used private IScopedInstrumentationAttributes requestOrResponseAccumulationContext; - public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, + public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, Instant sourceTimestamp, int startingSourceRequestIndex, int indexOfCurrentRequest) { this.firstTrafficStreamKeyForRequest = startingAtTrafficStreamKey; var requestKey = new UniqueReplayerRequestKey(startingAtTrafficStreamKey, startingSourceRequestIndex, indexOfCurrentRequest); var httpTransactionContext = new ReplayContexts.HttpTransactionContext( startingAtTrafficStreamKey.getTrafficStreamsContext(), - requestKey); + requestKey, sourceTimestamp); requestOrResponseAccumulationContext = new ReplayContexts.RequestAccumulationContext(httpTransactionContext); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index e03506518..ab8338247 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -44,7 +44,7 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); log.atDebug().setMessage(()->"Scheduling work for "+ctx.getConnectionId()+" at time "+timestamp).log(); - var scheduledContext = new ReplayContexts.ScheduledContext(ctx); + var scheduledContext = new ReplayContexts.ScheduledContext(ctx, timestamp); // this method doesn't use the scheduling that scheduleRequest and scheduleClose use because // doing work associated with a connection is considered to be preprocessing work independent // of the underlying network connection itself, so it's fair to be able to do this without @@ -72,8 +72,8 @@ public 
RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final aggregated response"); log.atDebug().setMessage(()->"Scheduling request for "+requestKey+" at start time "+start).log(); - return asynchronouslyInvokeRunnableToSetupFuture( - ctx, requestKey.getReplayerRequestIndex(), false, finalTunneledResponse, + return asynchronouslyInvokeRunnableToSetupFuture(ctx.getLogicalEnclosingScope(), + requestKey.getReplayerRequestIndex(), false, finalTunneledResponse, channelFutureAndRequestSchedule-> scheduleSendOnConnectionReplaySession(ctx, channelFutureAndRequestSchedule, finalTunneledResponse, start, interval, packets)); } @@ -218,7 +218,7 @@ private void scheduleSendOnConnectionReplaySession(IReplayContexts.IReplayerHttp packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); }; - var scheduledContext = new ReplayContexts.ScheduledContext(ctx); + var scheduledContext = new ReplayContexts.ScheduledContext(ctx, start); scheduleOnConnectionReplaySession(ctx.getLogicalEnclosingScope(), ctx.getReplayerRequestKey().getSourceRequestIndex(), channelFutureAndRequestSchedule, responseFuture, start, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 2c0a2b546..b82a3b4dc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -679,7 +679,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, .log(); throw e; } finally { - httpContext.endSpan(); + httpContext.close(); requestToFinalWorkFuturesMap.remove(requestKey); log.trace("removed rrPair.requestData to " + "targetTransactionInProgressMap for " + @@ -707,7 +707,7 @@ private void commitTrafficStreams(IInstrumentationAttributes context, List trafficStreamKeysBeingHeld, boolean shouldCommit) { if (shouldCommit && trafficStreamKeysBeingHeld != null) { for (var tsk : trafficStreamKeysBeingHeld) { - tsk.getTrafficStreamsContext().endSpan(); + tsk.getTrafficStreamsContext().close(); trafficCaptureSource.commitTrafficStream(context, tsk); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 6220a820e..a7cc705da 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -22,15 +22,17 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; +import org.opensearch.migrations.replay.datahandlers.http.helpers.WriteMeteringHandler; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import 
org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import java.net.URI; import java.time.Instant; @@ -56,8 +58,7 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - IReplayContexts.ITargetRequestContext parentContext; - IScopedInstrumentationAttributes currentRequestContext; + IWithTypedEnclosingScope currentRequestContextUnion; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, ReplayContexts.HttpTransactionContext httpTransactionContext) { @@ -66,8 +67,8 @@ public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri } public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IReplayContexts.IReplayerHttpTransactionContext ctx) { - this.parentContext = new ReplayContexts.TargetRequestContext(ctx); - this.currentRequestContext = new ReplayContexts.RequestSendingContext(this.parentContext); + var parentContext = new ReplayContexts.TargetRequestContext(ctx); + this.setCurrentRequestContext(new ReplayContexts.RequestSendingContext(parentContext)); responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), @@ -90,8 +91,24 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IReplayContexts }); } - public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, SslContext sslContext, - URI serverUri, IReplayContexts.IChannelKeyContext channelKeyContext) { + private & + IScopedInstrumentationAttributes> + void setCurrentRequestContext(T requestSendingContext) { + currentRequestContextUnion = requestSendingContext; + } + + private IScopedInstrumentationAttributes getCurrentRequestSpan() { + return (IScopedInstrumentationAttributes) currentRequestContextUnion; + } + + public IReplayContexts.ITargetRequestContext getParentContext() { + return currentRequestContextUnion.getLogicalEnclosingScope(); + } + + public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, + SslContext sslContext, + URI serverUri, + IReplayContexts.IChannelKeyContext channelKeyContext) { String host = serverUri.getHost(); int port = serverUri.getPort(); log.atTrace().setMessage(()->"Active - setting up backend connection to " + host + ":" + port).log(); @@ -107,6 +124,7 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup var rval = new DefaultChannelPromise(outboundChannelFuture.channel()); outboundChannelFuture.addListener((ChannelFutureListener) connectFuture -> { if (connectFuture.isSuccess()) { + channelKeyContext.onTargetConnectionCreated(); var pipeline = connectFuture.channel().pipeline(); pipeline.removeFirst(); log.atTrace().setMessage(()-> channelKeyContext.getChannelKey() + @@ -155,12 +173,17 @@ private void activateChannelForThisConsumer() { throw new IllegalStateException("Channel " + channel + "is being used elsewhere already!"); } var pipeline = 
channel.pipeline(); - addLoggingHandler(pipeline, "B"); - pipeline.addLast(new BacksideSnifferHandler(responseBuilder, ()->{ - this.currentRequestContext.close(); - this.currentRequestContext = new ReplayContexts.ReceivingHttpResponseContext(this.parentContext); - + // add this size counter BEFORE TLS? + pipeline.addFirst(new ReadMeteringingHandler(size->{ + if (!(this.currentRequestContextUnion instanceof IReplayContexts.IRequestSendingContext)) { + this.getCurrentRequestSpan().close(); + this.setCurrentRequestContext(new ReplayContexts.ReceivingHttpResponseContext(getParentContext())); + } + getParentContext().onBytesReceived(size); })); + pipeline.addFirst(new WriteMeteringHandler(size->getParentContext().onBytesSent(size))); + addLoggingHandler(pipeline, "B"); + pipeline.addLast(new BacksideSnifferHandler(responseBuilder)); addLoggingHandler(pipeline, "C"); pipeline.addLast(new HttpResponseDecoder()); addLoggingHandler(pipeline, "D"); @@ -190,8 +213,8 @@ private void deactivateChannel() { channel.config().setAutoRead(false); log.atDebug().setMessage(() -> "Reset the pipeline back to: " + pipeline).log(); } finally { - currentRequestContext.close(); - parentContext.close(); + getCurrentRequestSpan().close(); + getParentContext().close(); } } @@ -216,7 +239,7 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa } private IReplayContexts.IReplayerHttpTransactionContext httpContext() { - return parentContext.getLogicalEnclosingScope(); + return getParentContext().getLogicalEnclosingScope(); } private DiagnosticTrackableCompletableFuture @@ -224,7 +247,6 @@ private IReplayContexts.IReplayerHttpTransactionContext httpContext() { final var completableFuture = new DiagnosticTrackableCompletableFuture(new CompletableFuture<>(), ()->"CompletableFuture that will wait for the netty future to fill in the completion value"); final int readableBytes = packetData.readableBytes(); - this.currentRequestContext.meterIncrementEvent("readBytes", packetData.readableBytes()); channel.writeAndFlush(packetData) .addListener((ChannelFutureListener) future -> { Throwable cause = null; @@ -272,8 +294,8 @@ private IReplayContexts.IReplayerHttpTransactionContext httpContext() { public DiagnosticTrackableCompletableFuture finalizeRequest() { var ff = activeChannelFuture.getDeferredFutureThroughHandle((v,t)-> { - this.currentRequestContext.close(); - this.currentRequestContext = new ReplayContexts.WaitingForHttpResponseContext(parentContext); + this.getCurrentRequestSpan().close(); + this.setCurrentRequestContext(new ReplayContexts.WaitingForHttpResponseContext(getParentContext())); var future = new CompletableFuture(); var rval = new DiagnosticTrackableCompletableFuture(future, @@ -281,9 +303,7 @@ private IReplayContexts.IReplayerHttpTransactionContext httpContext() { if (t == null) { var responseWatchHandler = (BacksideHttpWatcherHandler) channel.pipeline().get(BACKSIDE_HTTP_WATCHER_HANDLER_NAME); - responseWatchHandler.addCallback(value -> { - future.complete(value); - }); + responseWatchHandler.addCallback(future::complete); } else { future.complete(responseBuilder.addErrorCause(t).build()); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 65bfc82e3..a7af34fb9 100644 --- 
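The generic signatures in the NettyPacketToHttpConsumer hunk above lose their type parameters in this rendering; reconstructed from the surrounding code, the idea is a single field that tracks whichever per-phase request context is active, constrained so it can be both closed as a span and asked for its enclosing target-request context. Roughly (a reconstruction, not verbatim patch text):

    IWithTypedEnclosingScope<IReplayContexts.ITargetRequestContext> currentRequestContextUnion;

    private <T extends IWithTypedEnclosingScope<IReplayContexts.ITargetRequestContext>
                     & IScopedInstrumentationAttributes>
    void setCurrentRequestContext(T phaseContext) {
        // The intersection bound means only objects that are simultaneously a scoped span
        // context AND a child of the target-request context can be stored here.
        currentRequestContextUnion = phaseContext;
    }

    private IScopedInstrumentationAttributes getCurrentRequestSpan() {
        return (IScopedInstrumentationAttributes) currentRequestContextUnion;
    }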
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -9,7 +9,6 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.Utils; @@ -76,7 +75,7 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); channel = new EmbeddedChannel(); pipelineOrchestrator = new RequestPipelineOrchestrator<>(chunkSizes, transformedPacketReceiver, - authTransformerFactory, httpTransactionContext); + authTransformerFactory, transformationContext); pipelineOrchestrator.addInitialHandlers(channel.pipeline(), transformer); } @@ -137,11 +136,9 @@ public DiagnosticTrackableCompletableFuture { - transformationContext.endSpan(); - transformationContext.meterIncrementEvent(t != null ? "transformRequestFailed" : - "transformRequestSuccess"); - transformationContext.meterHistogramMicros("transformationDuration"); + transformationContext.close(); if (t != null) { + transformationContext.onTransformFailure(); t = unwindPossibleCompletionException(t); if (t instanceof NoContentException) { return redriveWithoutTransformation(offloadingHandler.packetReceiver, t); @@ -153,6 +150,7 @@ public DiagnosticTrackableCompletableFuture extends Channel final IJsonTransformer transformer; final List> chunkSizes; final String diagnosticLabel; - private IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext; + private IReplayContexts.IRequestTransformationContext httpTransactionContext; static final MetricsLogger metricsLogger = new MetricsLogger("NettyDecodedHttpRequestPreliminaryConvertHandler"); public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, - IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { + IReplayContexts.IRequestTransformationContext httpTransactionContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; @@ -42,20 +43,16 @@ public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transfo } @Override - public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + public void channelRead(@NonNull ChannelHandlerContext ctx, @NonNull Object msg) throws Exception { if (msg instanceof HttpRequest) { + httpTransactionContext.onHeaderParse(); var request = (HttpRequest) msg; - log.info(new StringBuilder(diagnosticLabel) - .append(" parsed request: ") - .append(request.method()) - .append(" ") - .append(request.uri()) - .append(" ") - .append(request.protocolVersion().text()) - .toString()); + log.atInfo().setMessage(()-> diagnosticLabel + " parsed request: " + + request.method() + " " + request.uri() + " " + request.protocolVersion().text()).log(); metricsLogger.atSuccess(MetricsEvent.CAPTURED_REQUEST_PARSED_TO_HTTP) .setAttribute(MetricsAttributeKey.REQUEST_ID, 
httpTransactionContext) - .setAttribute(MetricsAttributeKey.CONNECTION_ID, httpTransactionContext.getConnectionId()) + .setAttribute(MetricsAttributeKey.CONNECTION_ID, + httpTransactionContext.getLogicalEnclosingScope().getConnectionId()) .setAttribute(MetricsAttributeKey.HTTP_METHOD, request.method()) .setAttribute(MetricsAttributeKey.HTTP_ENDPOINT, request.uri()).emit(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyJsonBodyAccumulateHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyJsonBodyAccumulateHandler.java index be256a0d6..7ee13a88f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyJsonBodyAccumulateHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyJsonBodyAccumulateHandler.java @@ -6,6 +6,7 @@ import io.netty.handler.codec.http.LastHttpContent; import lombok.SneakyThrows; import org.opensearch.migrations.replay.datahandlers.JsonAccumulator; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.JsonKeysForHttpMessage; /** @@ -18,13 +19,16 @@ */ public class NettyJsonBodyAccumulateHandler extends ChannelInboundHandlerAdapter { + private final IReplayContexts.IRequestTransformationContext context; + public static class IncompleteJsonBodyException extends NoContentException {} JsonAccumulator jsonAccumulator; HttpJsonMessageWithFaultingPayload capturedHttpJsonMessage; @SneakyThrows - public NettyJsonBodyAccumulateHandler() { + public NettyJsonBodyAccumulateHandler(IReplayContexts.IRequestTransformationContext context) { + this.context = context; this.jsonAccumulator = new JsonAccumulator(); } @@ -36,6 +40,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception var jsonObject = jsonAccumulator.consumeByteBuffer(((HttpContent)msg).content().nioBuffer()); if (jsonObject != null) { capturedHttpJsonMessage.payload().put(JsonKeysForHttpMessage.INLINED_JSON_BODY_DOCUMENT_KEY, jsonObject); + context.onJsonPayloadParseSucceeded(); ctx.fireChannelRead(capturedHttpJsonMessage); } else if (msg instanceof LastHttpContent) { throw new IncompleteJsonBodyException(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 9e0919556..589dec757 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -10,6 +10,8 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; +import org.opensearch.migrations.replay.datahandlers.http.helpers.LastHttpContentListener; +import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; @@ -42,14 +44,14 @@ public class RequestPipelineOrchestrator { public static final String 
HTTP_REQUEST_DECODER_NAME = "HTTP_REQUEST_DECODER"; private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; - private IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext; + private IReplayContexts.IRequestTransformationContext httpTransactionContext; @Getter final IAuthTransformerFactory authTransfomerFactory; public RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, IAuthTransformerFactory incomingAuthTransformerFactory, - IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { + IReplayContexts.IRequestTransformationContext httpTransactionContext) { this.chunkSizes = chunkSizes; this.packetReceiver = packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : @@ -85,6 +87,7 @@ void addJsonParsingHandlers(ChannelHandlerContext ctx, void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) { pipeline.addFirst(HTTP_REQUEST_DECODER_NAME, new HttpRequestDecoder()); addLoggingHandler(pipeline, "A"); + pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.aggregateInputChunk(size))); // IN: Netty HttpRequest(1) + HttpContent(1) blocks (which may be compressed) + EndOfInput + ByteBuf // OUT: ByteBufs(1) OR Netty HttpRequest(1) + HttpJsonMessage(1) with only headers PLUS + HttpContent(1) blocks // Note1: original Netty headers are preserved so that HttpContentDecompressor can work appropriately. @@ -104,17 +107,21 @@ void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) void addContentParsingHandlers(ChannelHandlerContext ctx, IJsonTransformer transformer, IAuthTransformer.StreamingFullMessageTransformer authTransfomer) { + httpTransactionContext.onPayloadParse(); log.debug("Adding content parsing handlers to pipeline"); var pipeline = ctx.pipeline(); + pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onPayloadBytesIn(size))); // IN: Netty HttpRequest(1) + HttpJsonMessage(1) with headers + HttpContent(1) blocks (which may be compressed) // OUT: Netty HttpRequest(2) + HttpJsonMessage(1) with headers + HttpContent(2) uncompressed blocks pipeline.addLast(new HttpContentDecompressor()); + pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onUncompressedBytesIn(size))); if (transformer != null) { + httpTransactionContext.onJsonPayloadParseRequired(); log.debug("Adding JSON handlers to pipeline"); // IN: Netty HttpRequest(2) + HttpJsonMessage(1) with headers + HttpContent(2) blocks // OUT: Netty HttpRequest(2) + HttpJsonMessage(2) with headers AND payload addLoggingHandler(pipeline, "C"); - pipeline.addLast(new NettyJsonBodyAccumulateHandler()); + pipeline.addLast(new NettyJsonBodyAccumulateHandler(httpTransactionContext)); // IN: Netty HttpRequest(2) + HttpJsonMessage(2) with headers AND payload // OUT: Netty HttpRequest(2) + HttpJsonMessage(3) with headers AND payload (transformed) pipeline.addLast(new NettyJsonBodyConvertHandler(transformer)); @@ -127,9 +134,12 @@ void addContentParsingHandlers(ChannelHandlerContext ctx, pipeline.addLast(new NettyJsonContentAuthSigner(authTransfomer)); addLoggingHandler(pipeline, "G"); } + pipeline.addLast(new LastHttpContentListener(()->httpTransactionContext.onPayloadParseSuccess())); + pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onUncompressedBytesOut(size))); // IN: Netty HttpRequest(2) + HttpJsonMessage(3) with headers only + HttpContent(3) blocks // OUT: Netty 
HttpRequest(3) + HttpJsonMessage(4) with headers only + HttpContent(4) blocks pipeline.addLast(new NettyJsonContentCompressor()); + pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onFinalBytesOut(size))); addLoggingHandler(pipeline, "H"); // IN: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + HttpContent(4) blocks + EndOfInput // OUT: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + ByteBufs(2) @@ -143,11 +153,13 @@ void addBaselineHandlers(ChannelPipeline pipeline) { // IN: ByteBufs(2) + HttpJsonMessage(4) with headers only + HttpContent(1) (if the repackaging handlers were skipped) // OUT: ByteBufs(3) which are sized similarly to how they were received pipeline.addLast(new NettyJsonToByteBufHandler(Collections.unmodifiableList(chunkSizes))); + pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.aggregateOutputChunk(size))); // IN: ByteBufs(3) // OUT: nothing - terminal! ByteBufs are routed to the packet handler! addLoggingHandler(pipeline, "K"); pipeline.addLast(OFFLOADING_HANDLER_NAME, - new NettySendByteBufsToPacketHandlerHandler(packetReceiver, httpTransactionContext)); + new NettySendByteBufsToPacketHandlerHandler(packetReceiver, + httpTransactionContext.getLogicalEnclosingScope())); } private void addLoggingHandler(ChannelPipeline pipeline, String name) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/LastHttpContentListener.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/LastHttpContentListener.java new file mode 100644 index 000000000..d80dda43a --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/LastHttpContentListener.java @@ -0,0 +1,22 @@ +package org.opensearch.migrations.replay.datahandlers.http.helpers; + +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.codec.http.LastHttpContent; + +public class LastHttpContentListener extends ChannelInboundHandlerAdapter { + + private final Runnable onLastContentReceived; + + public LastHttpContentListener(Runnable onLastContentReceived) { + this.onLastContentReceived = onLastContentReceived; + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (msg instanceof LastHttpContent) { + onLastContentReceived.run(); + } + super.channelRead(ctx, msg); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java new file mode 100644 index 000000000..507387a61 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java @@ -0,0 +1,24 @@ +package org.opensearch.migrations.replay.datahandlers.http.helpers; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.codec.http.HttpContent; +import lombok.AllArgsConstructor; + +import java.util.function.IntConsumer; + +@AllArgsConstructor +public class ReadMeteringingHandler extends ChannelInboundHandlerAdapter { + private final IntConsumer sizeConsumer; + + @Override + public void 
channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (msg instanceof ByteBuf) { + sizeConsumer.accept(((ByteBuf)msg).readableBytes()); + } else if (msg instanceof HttpContent) { + sizeConsumer.accept(((HttpContent)msg).content().readableBytes()); + } + super.channelRead(ctx, msg); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java new file mode 100644 index 000000000..e3045a7d3 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java @@ -0,0 +1,27 @@ +package org.opensearch.migrations.replay.datahandlers.http.helpers; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelOutboundHandlerAdapter; +import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.HttpContent; + +import java.util.function.IntConsumer; + +public class WriteMeteringHandler extends ChannelOutboundHandlerAdapter { + final IntConsumer sizeConsumer; + + public WriteMeteringHandler(IntConsumer sizeConsumer) { + this.sizeConsumer = sizeConsumer; + } + + @Override + public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { + if (msg instanceof ByteBuf) { + sizeConsumer.accept(((ByteBuf)msg).readableBytes()); + } else if (msg instanceof HttpContent) { + sizeConsumer.accept(((HttpContent)msg).content().readableBytes()); + } + super.write(ctx, msg, promise); + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 87a61acd3..510318e01 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -106,7 +106,7 @@ private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { " instead of " + looseParentScope + " (of type=" + looseParentScope.getClass() + ")"); } var kafkaCtx = (ReplayContexts.KafkaRecordContext) looseParentScope; - kafkaCtx.endSpan(); + kafkaCtx.close(); channelContextManager.releaseContextFor((ReplayContexts.ChannelKeyContext) kafkaCtx.getImmediateEnclosingScope()); } @@ -211,7 +211,12 @@ public List readNextTrafficStreamSynchronously(IInstrumen log.atTrace().setMessage(()->"Parsed traffic stream #" + trafficStreamsSoFar + ": " + offsetData + " " + ts).log(); var key = new TrafficStreamKeyWithKafkaRecordId( - channelContextManager::retainOrCreateContext, ts, kafkaRecord.key(), offsetData); + tsk -> { + var channelKeyCtx = channelContextManager.retainOrCreateContext(tsk); + return new ReplayContexts.KafkaRecordContext(channelKeyCtx, kafkaRecord.key(), + kafkaRecord.serializedKeySize() + kafkaRecord.serializedValueSize()); + }, + ts, offsetData); return (ITrafficStreamWithKey) new PojoTrafficStreamAndKey(ts, key); } catch (InvalidProtocolBufferException e) { RuntimeException recordError = behavioralPolicy.onInvalidKafkaRecord(kafkaRecord, e); diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index 52f3bf20d..af62e08cc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -123,6 +123,7 @@ public TrackingKafkaConsumer(@NonNull IInstrumentationAttributes globalContext, @Override public void onPartitionsRevoked(Collection partitions) { + new KafkaConsumerContexts.AsyncListeningContext(globalContext).onPartitionsRevoked(partitions); synchronized (commitDataLock) { safeCommit(globalContext); partitions.forEach(p -> { @@ -140,6 +141,7 @@ public void onPartitionsRevoked(Collection partitions) { } @Override public void onPartitionsAssigned(Collection newPartitions) { + new KafkaConsumerContexts.AsyncListeningContext(globalContext).onPartitionsAssigned(newPartitions); synchronized (commitDataLock) { consumerConnectionGeneration.incrementAndGet(); newPartitions.forEach(p -> partitionToOffsetLifecycleTrackerMap.computeIfAbsent(p.partition(), diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java index f150321b5..4d9009015 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java @@ -18,20 +18,19 @@ class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext i private final int partition; private final long offset; - TrafficStreamKeyWithKafkaRecordId(Function contextFactory, - TrafficStream trafficStream, String recordId, KafkaCommitOffsetData ok) { - this(contextFactory, trafficStream, recordId, ok.getGeneration(), ok.getPartition(), ok.getOffset()); + TrafficStreamKeyWithKafkaRecordId(Function contextFactory, + TrafficStream trafficStream, KafkaCommitOffsetData ok) { + this(contextFactory, trafficStream, ok.getGeneration(), ok.getPartition(), ok.getOffset()); } - TrafficStreamKeyWithKafkaRecordId(Function contextFactory, - TrafficStream trafficStream, String recordId, + TrafficStreamKeyWithKafkaRecordId(Function contextFactory, + TrafficStream trafficStream, int generation, int partition, long offset) { super(trafficStream); this.generation = generation; this.partition = partition; this.offset = offset; - var channelKeyContext = contextFactory.apply(this); - var kafkaContext = new ReplayContexts.KafkaRecordContext(channelKeyContext, recordId); + var kafkaContext = contextFactory.apply(this); this.setTrafficStreamsContext(new ReplayContexts.TrafficStreamsLifecycleContext(kafkaContext, this)); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java index 8dccefcc6..8c919813b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/netty/BacksideSnifferHandler.java @@ -13,13 +13,11 @@ public class BacksideSnifferHandler extends ChannelInboundHandlerAdapter { private final AggregatedRawResponse.Builder aggregatedRawResponseBuilder; - private Runnable firstByteReceivedCallback; + private static final MetricsLogger metricsLogger = new MetricsLogger("BacksideSnifferHandler"); - public BacksideSnifferHandler(AggregatedRawResponse.Builder aggregatedRawResponseBuilder, - Runnable firstByteReceivedCallback) { + public BacksideSnifferHandler(AggregatedRawResponse.Builder aggregatedRawResponseBuilder) { this.aggregatedRawResponseBuilder = aggregatedRawResponseBuilder; - this.firstByteReceivedCallback = firstByteReceivedCallback; } @Override @@ -31,10 +29,6 @@ public void channelActive(ChannelHandlerContext ctx) { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { var bb = (ByteBuf) msg; - if (firstByteReceivedCallback != null && bb.readableBytes() > 0) { - firstByteReceivedCallback.run(); - firstByteReceivedCallback = null; - } byte[] output = new byte[bb.readableBytes()]; bb.readBytes(output); aggregatedRawResponseBuilder.addResponsePacket(output); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index be90c201a..fdfcfea6e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.replay.tracing; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface IKafkaConsumerContexts { @@ -17,6 +18,15 @@ private ActivityNames() {} public static final String KAFKA_COMMIT = "kafkaCommit"; } + class MetricNames { + public static final String PARTITIONS_ASSIGNED_EVENT_COUNT = "partitionsAssigned"; + public static final String PARTITIONS_REVOKED_EVENT_COUNT = "partitionsRevoked"; + public static final String ACTIVE_PARTITIONS_ASSIGNED_COUNT = "numPartitionsAssigned"; + } + + interface IAsyncListeningContext extends IInstrumentationAttributes { + default String getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } + } interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { @Override default String getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } @@ -30,11 +40,17 @@ interface IPollScopeContext extends IKafkaConsumerScope { default String getActivityName() { return ActivityNames.KAFKA_POLL; } } + /** + * Context for the KafkaConsumer's bookkeeping around and including the commit service call + */ interface ICommitScopeContext extends IKafkaConsumerScope { @Override default String getActivityName() { return ActivityNames.COMMIT; } } + /** + * Context for ONLY the service call to Kafka to perform the commit. 
+ */ interface IKafkaCommitScopeContext extends IKafkaConsumerScope { @Override default String getActivityName() { return ActivityNames.KAFKA_COMMIT; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 22a2dfa45..2aa1f33cf 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -10,6 +10,8 @@ import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; +import java.time.Instant; + public class IReplayContexts { public static class ScopeNames { @@ -36,11 +38,38 @@ private ActivityNames() {} public static final String SCHEDULED = "scheduled"; public static final String TARGET_TRANSACTION = "targetTransaction"; public static final String REQUEST_SENDING = "requestSending"; - public static final String RECEIVING_REQUEST = "receivingRequest"; public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; + public static final String RECEIVING_RESPONSE = "receivingResponse"; public static final String TUPLE_HANDLING = "tupleHandling"; } + public static class MetricNames { + public static final String KAFKA_RECORD_READ = "kafkaRecordsRead"; + public static final String KAFKA_BYTES_READ = "kafkaBytesRead"; + public static final String TRAFFIC_STREAMS_READ = "trafficStreamsRead"; + public static final String TRANSFORM_HEADER_PARSE = "parsedHeader"; + public static final String TRANSFORM_PAYLOAD_PARSE_REQUIRED = "parsedPayload"; + public static final String TRANSFORM_PAYLOAD_PARSE_SUCCESS = "parsedPayloadSuccess"; + public static final String TRANSFORM_JSON_REQUIRED = "transformedJsonRequired"; + public static final String TRANSFORM_JSON_SUCCEEDED = "transformedJsonSucceeded"; + public static final String TRANSFORM_PAYLOAD_BYTES_IN = "originalPayloadBytesIn"; + public static final String TRANSFORM_UNCOMPRESSED_BYTES_IN = "uncompressedBytesIn"; + public static final String TRANSFORM_UNCOMPRESSED_BYTES_OUT = "uncompressedBytesOut"; + public static final String TRANSFORM_FINAL_PAYLOAD_BYTES_OUT = "finalPayloadBytesOut"; + public static final String TRANSFORM_SUCCESS = "transformSuccess"; + public static final String TRANSFORM_SKIPPED = "transformSkipped"; + public static final String TRANSFORM_ERROR = "transformError"; + public static final String TRANSFORM_BYTES_IN = "transformBytesIn"; + public static final String TRANSFORM_BYTES_OUT = "transformBytesOut"; + public static final String TRANSFORM_CHUNKS_IN = "transformChunksIn"; + public static final String TRANSFORM_CHUNKS_OUT = "transformChunksOut"; + public static final String NETTY_SCHEDULE_LAG = "scheduleLag"; + public static final String SOURCE_TO_TARGET_REQUEST_LAG = "lagBetweenSourceAndTargetRequests"; + public static final String ACTIVE_TARGET_CONNECTIONS = "activeTargetConnections"; + public static final String BYTES_WRITTEN_TO_TARGET = "bytesWrittenToTarget"; + public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget"; + } + public interface IChannelKeyContext extends IConnectionContext { @Override default String getActivityName() { return ActivityNames.CHANNEL; } @@ -56,6 +85,10 @@ default String getConnectionId() { default String getNodeId() { return 
getChannelKey().getNodeId(); } + + void onTargetConnectionCreated(); + + void onTargetConnectionClosed(); } public interface IKafkaRecordContext @@ -73,10 +106,13 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } public interface ITrafficStreamsLifecycleContext - extends IChannelKeyContext, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TRAFFIC_STREAM_LIFETIME; } ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); + default String getConnectionId() { + return getChannelKey().getConnectionId(); + } default String getScopeName() { return ScopeNames.TRAFFIC_STREAM_LIFETIME_SCOPE; } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); @@ -84,15 +120,19 @@ default ISourceTrafficChannelKey getChannelKey() { } public interface IReplayerHttpTransactionContext - extends IHttpTransactionContext, IChannelKeyContext, IWithTypedEnclosingScope { + extends IHttpTransactionContext, IWithTypedEnclosingScope { static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); default String getActivityName() { return ActivityNames.HTTP_TRANSACTION; } UniqueReplayerRequestKey getReplayerRequestKey(); IChannelKeyContext getChannelKeyContext(); + Instant getTimeOfOriginalRequest(); @Override default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } + default String getConnectionId() { + return getChannelKey().getConnectionId(); + } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -129,6 +169,26 @@ public interface IRequestTransformationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TRANSFORMATION; } default String getScopeName() { return ScopeNames.HTTP_TRANSFORMER_SCOPE; } + + + void onHeaderParse(); + void onPayloadParse(); + + void onPayloadParseSuccess(); + + void onJsonPayloadParseRequired(); + + void onJsonPayloadParseSucceeded(); + + void onPayloadBytesIn(int inputSize); + void onUncompressedBytesIn(int inputSize); + void onUncompressedBytesOut(int inputSize); + void onFinalBytesOut(int outputSize); + void onTransformSuccess(); + void onTransformSkip(); + void onTransformFailure(); + void aggregateInputChunk(int sizeInBytes); + void aggregateOutputChunk(int sizeInBytes); } public interface IScheduledContext @@ -141,6 +201,9 @@ public interface ITargetRequestContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TARGET_TRANSACTION; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } + + void onBytesSent(int size); + void onBytesReceived(int size); } public interface IRequestSendingContext @@ -157,7 +220,7 @@ public interface IWaitingForHttpResponseContext public interface IReceivingHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getActivityName() { return ActivityNames.RECEIVING_REQUEST; } + default String getActivityName() { return ActivityNames.RECEIVING_RESPONSE; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index dd319a881..9e6dec74b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -1,20 +1,51 @@ package org.opensearch.migrations.replay.tracing; +import lombok.AllArgsConstructor; +import lombok.Getter; import lombok.NonNull; -import org.opensearch.migrations.replay.kafka.TrackingKafkaConsumer; +import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import java.util.Collection; + public class KafkaConsumerContexts { private KafkaConsumerContexts() {} + @AllArgsConstructor + public static class AsyncListeningContext implements IKafkaConsumerContexts.IAsyncListeningContext { + @Getter + @NonNull + private final IInstrumentationAttributes enclosingScope; + + @Override + public @NonNull IInstrumentConstructor getRootInstrumentationScope() { + return enclosingScope.getRootInstrumentationScope(); + } + + public void onPartitionsRevoked(Collection partitions) { + meterIncrementEvent(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT); + onParitionsAssignedChanged(partitions.size()); + } + + public void onPartitionsAssigned(Collection partitions) { + meterIncrementEvent(IKafkaConsumerContexts.MetricNames.PARTITIONS_ASSIGNED_EVENT_COUNT); + onParitionsAssignedChanged(partitions.size()); + } + + private void onParitionsAssignedChanged(int delta) { + meterDeltaEvent(IKafkaConsumerContexts.MetricNames.ACTIVE_PARTITIONS_ASSIGNED_COUNT, delta); + } + } + public static class TouchScopeContext extends DirectNestedSpanContext implements IKafkaConsumerContexts.ITouchScopeContext { public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -22,7 +53,7 @@ public static class PollScopeContext extends DirectNestedSpanContext implements IReplayContexts.IKafkaRecordContext { + final String recordId; - public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, String recordId) { + public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, String recordId, + int recordSize) { super(enclosingScope); this.recordId = recordId; - setCurrentSpan(); + initializeSpan(); + this.meterIncrementEvent(IReplayContexts.MetricNames.KAFKA_RECORD_READ); + this.meterIncrementEvent(IReplayContexts.MetricNames.KAFKA_BYTES_READ, recordSize); } @Override @@ -58,7 +74,8 @@ public TrafficStreamsLifecycleContext(IReplayContexts.IKafkaRecordContext enclos ITrafficStreamKey trafficStreamKey) { super(enclosingScope); this.trafficStreamKey = trafficStreamKey; - setCurrentSpan(); + initializeSpan(); + this.meterIncrementEvent(IReplayContexts.MetricNames.TRAFFIC_STREAMS_READ); } @Override @@ -81,12 +98,15 @@ public static class HttpTransactionContext extends IndirectNestedSpanContext implements IReplayContexts.IReplayerHttpTransactionContext { final UniqueReplayerRequestKey replayerRequestKey; + @Getter final Instant timeOfOriginalRequest; public HttpTransactionContext(IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, - UniqueReplayerRequestKey replayerRequestKey) { + UniqueReplayerRequestKey 
replayerRequestKey, + Instant timeOfOriginalRequest) { super(enclosingScope); this.replayerRequestKey = replayerRequestKey; - setCurrentSpan(); + this.timeOfOriginalRequest = timeOfOriginalRequest; + initializeSpan(); } public IReplayContexts.IChannelKeyContext getChannelKeyContext() { @@ -114,7 +134,7 @@ public static class RequestAccumulationContext implements IReplayContexts.IRequestAccumulationContext { public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -123,7 +143,7 @@ public static class ResponseAccumulationContext implements IReplayContexts.IResponseAccumulationContext { public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -132,16 +152,87 @@ public static class RequestTransformationContext implements IReplayContexts.IRequestTransformationContext { public RequestTransformationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); + } + + @Override + public void onHeaderParse() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_HEADER_PARSE); + } + @Override + public void onPayloadParse() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED); + } + @Override + public void onPayloadParseSuccess() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_SUCCESS); + } + @Override + public void onJsonPayloadParseRequired() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_JSON_REQUIRED); + } + @Override + public void onJsonPayloadParseSucceeded() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_JSON_SUCCEEDED); + } + @Override + public void onPayloadBytesIn(int inputSize) { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_BYTES_IN, inputSize); + } + @Override + public void onUncompressedBytesIn(int inputSize) { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_IN, inputSize); + } + @Override + public void onUncompressedBytesOut(int inputSize) { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_OUT, inputSize); + } + @Override + public void onFinalBytesOut(int inputSize) { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_FINAL_PAYLOAD_BYTES_OUT, inputSize); + } + @Override + public void onTransformSuccess() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_SUCCESS); + } + @Override + public void onTransformSkip() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_SKIPPED); + } + @Override + public void onTransformFailure() { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_ERROR); + } + @Override + public void aggregateInputChunk(int sizeInBytes) { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_BYTES_IN, sizeInBytes); + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_IN); + } + @Override + public void aggregateOutputChunk(int sizeInBytes) { + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_BYTES_OUT, sizeInBytes); + meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_OUT); } } public static class ScheduledContext extends DirectNestedSpanContext implements IReplayContexts.IScheduledContext { - public ScheduledContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + private final Instant scheduledFor; + 
+ public ScheduledContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope, + Instant scheduledFor) { super(enclosingScope); - setCurrentSpan(); + this.scheduledFor = scheduledFor; + initializeSpan(); + } + + @Override + public void sendMeterEventsForEnd() { + super.sendMeterEventsForEnd(); + meterHistogramMillis(IReplayContexts.MetricNames.NETTY_SCHEDULE_LAG, + Duration.between(scheduledFor, Instant.now())); + } } @@ -150,7 +241,18 @@ public static class TargetRequestContext implements IReplayContexts.ITargetRequestContext { public TargetRequestContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); + meterHistogramMillis(IReplayContexts.MetricNames.SOURCE_TO_TARGET_REQUEST_LAG, + Duration.between(enclosingScope.getTimeOfOriginalRequest(), Instant.now())); + } + @Override + public void onBytesSent(int size) { + meterIncrementEvent(IReplayContexts.MetricNames.BYTES_WRITTEN_TO_TARGET, size); + } + + @Override + public void onBytesReceived(int size) { + meterIncrementEvent(IReplayContexts.MetricNames.BYTES_READ_FROM_TARGET, size); } } @@ -159,7 +261,7 @@ public static class RequestSendingContext implements IReplayContexts.IRequestSendingContext { public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -168,7 +270,7 @@ public static class WaitingForHttpResponseContext implements IReplayContexts.IWaitingForHttpResponseContext { public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -177,7 +279,7 @@ public static class ReceivingHttpResponseContext implements IReplayContexts.IReceivingHttpResponseContext { public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -186,7 +288,7 @@ public static class TupleHandlingContext implements IReplayContexts.ITupleHandlingContext { public TupleHandlingContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index d447dbd06..dd5445c50 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -14,7 +14,7 @@ public static class ReadChunkContext { public ReadChunkContext(T enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -24,7 +24,7 @@ public static class BackPressureBlockContext { public BackPressureBlockContext(@NonNull ITrafficSourceContexts.IReadChunkContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } @@ -33,7 +33,7 @@ public static class WaitForNextSignal implements ITrafficSourceContexts.IWaitForNextSignal { public WaitForNextSignal(@NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { super(enclosingScope); - setCurrentSpan(); + initializeSpan(); } } diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 5edbd3ac1..cb2065fc5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -99,7 +99,7 @@ public Duration getBufferTimeWindow() { log.info("BlockingTrafficSource::composing"); return underlyingSource.readNextTrafficStreamChunk(readContext); }) - .whenComplete((v,t)->readContext.endSpan()); + .whenComplete((v,t)->readContext.close()); return trafficStreamListFuture.whenComplete((v, t) -> { if (t != null) { return; @@ -161,7 +161,7 @@ private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) } } if (blockContext != null) { - blockContext.endSpan(); + blockContext.close(); } return null; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 72b999044..ceab43fc7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -40,7 +40,7 @@ private static class IOSTrafficStreamContext public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, ITrafficStreamKey tsk) { super(ctx); this.trafficStreamKey = tsk; - setCurrentSpan(); + initializeSpan(); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java index 3413e68c5..a078ae8c8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java @@ -44,7 +44,7 @@ public void onExpireAccumulation(String partitionId, accumulation, ts); var rrPair = createdAccumulations.get(i).getOrCreateTransactionPair( PojoTrafficStreamKeyAndContext.build("n","c",1, - k->new TestTrafficStreamsLifecycleContext(context, k))); + k->new TestTrafficStreamsLifecycleContext(context, k)), Instant.EPOCH); rrPair.addResponseData(ts, ("Add"+i).getBytes(StandardCharsets.UTF_8)); expiredCountsPerLoop.add(expiredAccumulations.size()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java index 09519e283..c7508382b 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java @@ -48,7 +48,7 @@ public void onExpireAccumulation(String partitionId, createdAccumulations.add(accumulation); if 
(accumulation != null) { var rrPair = accumulation.getOrCreateTransactionPair(PojoTrafficStreamKeyAndContext.build("n","c",1, - k->new TestTrafficStreamsLifecycleContext(context, k))); + k->new TestTrafficStreamsLifecycleContext(context, k)), Instant.EPOCH); rrPair.addResponseData(ts, ("Add" + i).getBytes(StandardCharsets.UTF_8)); } expiredCountsPerLoop.add(expiredAccumulations.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 59bb19623..396da06b4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -238,7 +238,8 @@ private void testOutputterForRequest(String requestResourceName, String expected k->new TestTrafficStreamsLifecycleContext(context, k)); var requestCtx = TestRequestKey.getTestConnectionRequestContext(context, 0); trafficStreamKey.setTrafficStreamsContext(requestCtx.getImmediateEnclosingScope()); - var sourcePair = new RequestResponsePacketPair(trafficStreamKey, 0, 0); + var sourcePair = new RequestResponsePacketPair(trafficStreamKey, Instant.EPOCH, + 0, 0); var rawRequestData = loadResourceAsBytes("/requests/raw/" + requestResourceName); sourcePair.addRequestData(Instant.EPOCH, rawRequestData); var rawResponseData = NettyPacketToHttpConsumerTest.EXPECTED_RESPONSE_STRING.getBytes(StandardCharsets.UTF_8); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index bc79d201a..dfe645ebe 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -10,6 +10,9 @@ import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.KafkaConsumerContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -45,8 +48,10 @@ public void testRecordToString() { .setNodeId("n") .setNumber(7) .build(); - var contextFactory = new ChannelContextManager(TestContext.noTracking()); - var tsk = new TrafficStreamKeyWithKafkaRecordId(contextFactory, ts, "testRecord", 1, 2, 123); + var tsk = new TrafficStreamKeyWithKafkaRecordId( + k -> new ReplayContexts.KafkaRecordContext( + new ChannelContextManager(TestContext.noTracking()).retainOrCreateContext(k), "", 1), + ts, 1, 2, 123); Assertions.assertEquals("n.c.7|partition=2|offset=123", tsk.toString()); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index ad05dcd40..c6c9bc1c1 100644 --- 
a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -1,5 +1,7 @@ package org.opensearch.migrations.replay; +import java.time.Instant; + import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.SimpleMeteringClosure; @@ -24,6 +26,7 @@ private TestRequestKey() {} PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, tsk -> new TestTrafficStreamsLifecycleContext(ctx, tsk)), 0, replayerIdx); - return new ReplayContexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk); + return new ReplayContexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk, + Instant.EPOCH); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index d8e2da3cf..024bc098f 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -15,7 +15,7 @@ class TestTrafficStreamsLifecycleContext public TestTrafficStreamsLifecycleContext(IInstrumentationAttributes rootContext, ITrafficStreamKey tsk) { super(new ReplayContexts.ChannelKeyContext(rootContext, tsk)); this.trafficStreamKey = tsk; - setCurrentSpan(); + initializeSpan(); } @Override @@ -34,8 +34,8 @@ public ITrafficStreamKey getTrafficStreamKey() { } @Override - public void endSpan() { - super.endSpan(); + public void close() { + super.close(); getLogicalEnclosingScope().close(); } } From 668448693121d1ab673abdeb018c81277611fcb7 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 3 Jan 2024 00:43:16 -0500 Subject: [PATCH 43/94] Minor bugfixes that make a huge difference. Fix a broken unit test as a metric name had changed and meter delta events as upDownCounters. 
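
The delta fix routes meterDeltaEvent through an OpenTelemetry up/down counter, which may legitimately decrease, instead of the monotonic counter used before. A minimal sketch of the two instrument kinds, using illustrative metric names (not the project's) and assuming a Meter obtained elsewhere:

    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.api.metrics.Meter;

    class CounterKindsSketch {
        static void record(Meter meter) {
            // counterBuilder: monotonic counter for totals that only accumulate (e.g. bytes read).
            meter.counterBuilder("exampleBytesRead").build().add(1024, Attributes.empty());
            // upDownCounterBuilder: gauge-like counter where a later -1 must subtract,
            // which is what delta events such as active-connection counts need.
            meter.upDownCounterBuilder("exampleActiveConnections").build().add(1, Attributes.empty());
            meter.upDownCounterBuilder("exampleActiveConnections").build().add(-1, Attributes.empty());
        }
    }
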
Signed-off-by: Greg Schohn --- .../migrations/tracing/IInstrumentationAttributes.java | 2 +- .../opensearch/migrations/replay/FullTrafficReplayerTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 641f25942..ebb7d1cbb 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -44,7 +44,7 @@ default void meterIncrementEvent(String eventName, long increment) { getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName, increment); } default void meterDeltaEvent(String eventName, long delta) { - getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName, delta); + getRootInstrumentationScope().buildMeter(this).meterDeltaEvent(eventName, delta); } default void meterHistogramMicros(String eventName, Duration value) { getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, value); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index d4fa7ff45..3e9dd416e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -192,7 +192,7 @@ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSp // ideally, we'd be getting these back too, but our requests are malformed, so the server closes, which // may occur before we've started to accumulate the response. So - just ignore these, but make sure that // there isn't anything else that we've missed. - byName.remove("receivingRequest"); + byName.remove("receivingResponse"); Assertions.assertEquals("", byName.entrySet().stream() .map(kvp->kvp.getKey()+":"+kvp.getValue()).collect(Collectors.joining())); From 7bf4388a3f565eb4a979da332ddd82fe50119089 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 3 Jan 2024 17:33:35 -0500 Subject: [PATCH 44/94] Some refactoring to increase the typesafety and to support greater control over which attributes are included within metrics and spans. I've also added "activeConnection" for capture connections. 
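
The attribute control comes from the new FilteringAttributeBuilder, which wraps an AttributesBuilder and drops keys according to an allow/deny set; the activeConnection up/down counter in ConnectionContext uses it to keep only a narrow attribute set. A minimal usage sketch, where the sketch class and the attribute key names are illustrative rather than the project's:

    import io.opentelemetry.api.common.AttributeKey;
    import io.opentelemetry.api.common.Attributes;
    import org.opensearch.migrations.tracing.FilteringAttributeBuilder;
    import java.util.Set;

    class FilteringSketch {
        static Attributes allowListedOnly() {
            // matchExcludes == false treats the set as an allow-list: keys outside it are dropped.
            var builder = new FilteringAttributeBuilder(Attributes.builder(), false, Set.of("connectionId"));
            builder.put(AttributeKey.stringKey("connectionId"), "c-1");  // kept: named in the set
            builder.put(AttributeKey.stringKey("requestUri"), "/index"); // dropped: not named
            return builder.build();                                      // carries only connectionId
        }
    }

Because the enclosing contexts' attributes are copied into the builder that the metric call supplies, the same filtering keeps per-metric attribute sets small even when a context chain carries many attributes.
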
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 2 - .../tracing/KafkaRecordContext.java | 3 +- .../tracing/ConnectionContext.java | 38 ++++---- .../tracing/AbstractNestedSpanContext.java | 11 ++- .../tracing/FilteringAttributeBuilder.java | 55 ++++++++++++ .../tracing/IInstrumentConstructor.java | 9 +- .../tracing/IInstrumentationAttributes.java | 37 ++++---- .../IScopedInstrumentationAttributes.java | 2 +- .../migrations/tracing/IWithStartTime.java | 7 -- .../tracing/IWithStartTimeAndAttributes.java | 41 ++++++++- .../migrations/tracing/MeteringClosure.java | 35 ++++++++ .../tracing/MeteringClosureForStartTimes.java | 48 ++++++++++ .../migrations/tracing/RootOtelContext.java | 87 ++++--------------- .../tracing/SimpleMeteringClosure.java | 65 -------------- ...nditionallyReliableLoggingHttpHandler.java | 5 +- .../netty/LoggingHttpHandler.java | 7 +- .../proxyserver/CaptureProxy.java | 1 - .../netty/BacksideConnectionPool.java | 2 - .../netty/NettyScanningHttpProxy.java | 3 +- .../netty/ProxyChannelInitializer.java | 9 +- .../migrations/replay/Accumulation.java | 1 - .../migrations/replay/TrafficReplayer.java | 1 - .../kafka/KafkaTrafficCaptureSource.java | 1 - .../replay/tracing/ReplayContexts.java | 3 +- .../migrations/replay/TestRequestKey.java | 1 - 25 files changed, 260 insertions(+), 214 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 4bbad1e9d..4778664f6 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -10,7 +10,6 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; @@ -19,7 +18,6 @@ import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.KafkaRecordContext; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.nio.ByteBuffer; diff --git 
a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 559d0f877..288b1917e 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -6,10 +6,9 @@ import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.IWithStartTime; public class KafkaRecordContext extends DirectNestedSpanContext - implements IScopedInstrumentationAttributes, IWithStartTime { + implements IScopedInstrumentationAttributes { static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); static final AttributeKey RECORD_SIZE_ATTR = AttributeKey.longKey("recordSize"); diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index adfa615bc..6669006d5 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -1,36 +1,40 @@ package org.opensearch.migrations.trafficcapture.tracing; -import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.common.Attributes; import lombok.Getter; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.ISpanGenerator; -import org.opensearch.migrations.tracing.ISpanWithParentGenerator; +import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import org.opensearch.migrations.tracing.FilteringAttributeBuilder; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.IWithStartTime; -import java.time.Instant; +import java.util.Set; + +public class ConnectionContext extends AbstractNestedSpanContext implements IConnectionContext { + private static final Set KEYS_TO_ALLOW_FOR_ACTIVE_CONNECTION_COUNT = Set.of(CONNECTION_ID_ATTR.getKey()); + public static final String ACTIVE_CONNECTION = "activeConnection"; -public class ConnectionContext implements IConnectionContext, IWithStartTime { @Getter public final String connectionId; @Getter public final String nodeId; - @Getter - public final Span currentSpan; - @Getter - private final Instant startTime; - @Getter - final IInstrumentConstructor rootInstrumentationScope; @Override public String getActivityName() { return "captureConnection"; } - public ConnectionContext(IInstrumentConstructor rootInstrumentationScope, + public ConnectionContext(RootOtelContext rootInstrumentationScope, String connectionId, String nodeId) { - this.rootInstrumentationScope = rootInstrumentationScope; + super(rootInstrumentationScope); 
this.connectionId = connectionId; this.nodeId = nodeId; - this.currentSpan = rootInstrumentationScope.buildSpanWithoutParent("","connectionLifetime"); - this.startTime = Instant.now(); + initializeSpan(); + meterDeltaEvent(ACTIVE_CONNECTION, 1, + new FilteringAttributeBuilder(Attributes.builder(), false, KEYS_TO_ALLOW_FOR_ACTIVE_CONNECTION_COUNT)); + } + + @Override + public void sendMeterEventsForEnd() { + super.sendMeterEventsForEnd(); + meterDeltaEvent(ACTIVE_CONNECTION, -1, + new FilteringAttributeBuilder(Attributes.builder(), false, KEYS_TO_ALLOW_FOR_ACTIVE_CONNECTION_COUNT)); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java index 7ed3ea46b..c340ff5a5 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java @@ -1,5 +1,7 @@ package org.opensearch.migrations.tracing; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; import lombok.Getter; import lombok.NonNull; @@ -7,7 +9,7 @@ import java.time.Instant; public abstract class AbstractNestedSpanContext - implements IScopedInstrumentationAttributes, IWithStartTime, AutoCloseable { + implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, AutoCloseable { final T enclosingScope; @Getter final Instant startTime; @Getter private Span currentSpan; @@ -27,7 +29,12 @@ public IInstrumentationAttributes getEnclosingScope() { public T getImmediateEnclosingScope() { return enclosingScope; } protected void initializeSpan() { - initializeSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), getActivityName())); + initializeSpan(Attributes.builder()); + } + + protected void initializeSpan(AttributesBuilder attributesBuilder) { + initializeSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), getActivityName(), + attributesBuilder)); } public void initializeSpan(@NonNull Span s) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java new file mode 100644 index 000000000..4e2bbb211 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java @@ -0,0 +1,55 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; +import lombok.AllArgsConstructor; +import lombok.Getter; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + +@Getter +public class FilteringAttributeBuilder implements AttributesBuilder { + private AttributesBuilder underlyingBuilder; + public final boolean matchExcludes; + public final Set keysToMatch; + + public FilteringAttributeBuilder(AttributesBuilder underlyingBuilder, boolean matchesExclude, + Set keysToMatch) { + this.underlyingBuilder = underlyingBuilder; + this.matchExcludes = matchesExclude; + this.keysToMatch = Collections.unmodifiableSet(keysToMatch); + } + + 
@Override + public Attributes build() { + return underlyingBuilder.build(); + } + + @Override + public AttributesBuilder put(AttributeKey key, int value) { + if (keysToMatch.contains(key.getKey()) == matchExcludes) { + return this; + } + underlyingBuilder = underlyingBuilder.put(key, value); + return this; + } + + @Override + public AttributesBuilder put(AttributeKey key, T value) { + if (keysToMatch.contains(key.getKey()) == matchExcludes) { + return this; + } + underlyingBuilder = underlyingBuilder.put(key, value); + return this; + } + + @Override + public AttributesBuilder putAll(Attributes attributes) { + attributes.forEach((k,v)->{ this.underlyingBuilder = underlyingBuilder.put((AttributeKey)k,v); }); + return this; + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index d90d9e52e..9f97b2219 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -1,10 +1,11 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; public interface IInstrumentConstructor { - Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName); - Span buildSpanWithoutParent(String scopeName, String spanName); - SimpleMeteringClosure buildMeter(IInstrumentationAttributes context); + Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, + AttributesBuilder attributesBuilder); + MeteringClosure buildSimpleMeter(IInstrumentationAttributes context); + MeteringClosureForStartTimes buildMeter(IWithStartTimeAndAttributes context); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index ebb7d1cbb..b40af6672 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -18,14 +18,13 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; } - default Attributes getPopulatedAttributes() { - return getPopulatedAttributesBuilder().build(); + default Attributes getPopulatedAttributes(AttributesBuilder builder) { + return getPopulatedAttributesBuilder(builder).build(); } - default AttributesBuilder getPopulatedAttributesBuilder() { + default AttributesBuilder getPopulatedAttributesBuilder(AttributesBuilder builder) { var currentObj = this; var stack = new ArrayList(); - var builder = Attributes.builder(); while (currentObj != null) { stack.add(currentObj); currentObj = currentObj.getEnclosingScope(); @@ -38,28 +37,22 @@ default AttributesBuilder getPopulatedAttributesBuilder() { } default void meterIncrementEvent(String eventName) { - getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName); + meterIncrementEvent(eventName, Attributes.builder()); } - default void meterIncrementEvent(String eventName, long increment) { - 
getRootInstrumentationScope().buildMeter(this).meterIncrementEvent(eventName, increment); - } - default void meterDeltaEvent(String eventName, long delta) { - getRootInstrumentationScope().buildMeter(this).meterDeltaEvent(eventName, delta); - } - default void meterHistogramMicros(String eventName, Duration value) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, value); + default void meterIncrementEvent(String eventName, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildSimpleMeter(this).meterIncrementEvent(eventName, attributesBuilder); } - default void meterHistogramMillis(String eventName, Duration value) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName, value); + default void meterIncrementEvent(String eventName, long increment) { + meterIncrementEvent (eventName, increment, Attributes.builder()); } - default void meterHistogram(String eventName, String units, long value) { - getRootInstrumentationScope().buildMeter(this).meterHistogram(eventName, units, value); + default void meterIncrementEvent(String eventName, long increment, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildSimpleMeter(this) + .meterIncrementEvent(eventName, increment, attributesBuilder); } - default void meterHistogramMicros(String eventName) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName); + default void meterDeltaEvent(String eventName, long delta) { + meterDeltaEvent(eventName, delta, Attributes.builder()); } - default void meterHistogramMillis(String eventName) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName); + default void meterDeltaEvent(String eventName, long delta, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildSimpleMeter(this).meterDeltaEvent(eventName, delta, attributesBuilder); } - } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 42444cfeb..e40cf9c9d 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -3,7 +3,7 @@ import io.opentelemetry.api.trace.Span; import lombok.NonNull; -public interface IScopedInstrumentationAttributes extends IInstrumentationAttributes, AutoCloseable { +public interface IScopedInstrumentationAttributes extends IWithStartTimeAndAttributes, AutoCloseable { String getActivityName(); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java deleted file mode 100644 index b8e362ddb..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTime.java +++ /dev/null @@ -1,7 +0,0 @@ -package org.opensearch.migrations.tracing; - -import java.time.Instant; - -public interface IWithStartTime { - Instant getStartTime(); -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 76f3c04c8..19953f82a 100644 
--- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -1,4 +1,43 @@ package org.opensearch.migrations.tracing; -public interface IWithStartTimeAndAttributes extends IWithStartTime, IInstrumentationAttributes { +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; + +import java.time.Duration; +import java.time.Instant; + +public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { + Instant getStartTime(); + + + default void meterHistogramMicros(String eventName, Duration value) { + meterHistogramMicros(eventName, value, Attributes.builder()); + } + default void meterHistogramMicros(String eventName, Duration value, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, value, attributesBuilder); + } + default void meterHistogramMillis(String eventName, Duration value) { + meterHistogramMillis(eventName, value, Attributes.builder()); + } + default void meterHistogramMillis(String eventName, Duration value, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName, value, attributesBuilder); + } + default void meterHistogram(String eventName, String units, long value) { + meterHistogram(eventName, units, value, Attributes.builder()); + } + default void meterHistogram(String eventName, String units, long value, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeter(this).meterHistogram(eventName, units, value, attributesBuilder); + } + default void meterHistogramMicros(String eventName) { + meterHistogramMicros(eventName, Attributes.builder()); + } + default void meterHistogramMicros(String eventName, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, attributesBuilder); + } + default void meterHistogramMillis(String eventName) { + meterHistogramMillis(eventName, Attributes.builder()); + } + default void meterHistogramMillis(String eventName, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName, attributesBuilder); + } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java new file mode 100644 index 000000000..cc13e1120 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java @@ -0,0 +1,35 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.Meter; +import lombok.AllArgsConstructor; + +import java.time.Duration; +import java.time.Instant; + +@AllArgsConstructor +public class MeteringClosure { + public final IInstrumentationAttributes ctx; + public final Meter meter; + + public void meterIncrementEvent(String eventName, AttributesBuilder attributesBuilder) { + meterIncrementEvent(eventName, 1, attributesBuilder); + } + + public void meterIncrementEvent(String eventName, long increment, AttributesBuilder attributesBuilder) { + if (ctx == null) { + return; + } + meter.counterBuilder(eventName) + .build().add(increment, 
ctx.getPopulatedAttributesBuilder(attributesBuilder) + .put("labelName", eventName) + .build()); + } + + public void meterDeltaEvent(String eventName, long delta, AttributesBuilder attributesBuilder) { + meter.upDownCounterBuilder(eventName) + .build().add(delta, ctx.getPopulatedAttributesBuilder(attributesBuilder) + .put("labelName", eventName) + .build()); + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java new file mode 100644 index 000000000..fcd85bca8 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java @@ -0,0 +1,48 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.Meter; + +import java.time.Duration; +import java.time.Instant; + +public class MeteringClosureForStartTimes extends MeteringClosure { + + MeteringClosureForStartTimes(IWithStartTimeAndAttributes ctx, Meter meter) { + super(ctx, meter); + } + + public IWithStartTimeAndAttributes getContext() { + return (IWithStartTimeAndAttributes) ctx; + } + + public void meterHistogramMicros(String eventName, Duration between, AttributesBuilder attributesBuilder) { + meterHistogram(eventName, "us", between.toNanos()*1000, attributesBuilder); + } + + public void meterHistogramMillis(String eventName, Duration between, AttributesBuilder attributesBuilder) { + meterHistogram(eventName, "ms", between.toMillis(), attributesBuilder); + } + + public void meterHistogram(String eventName, String units, long value, AttributesBuilder attributesBuilder) { + if (ctx == null) { + return; + } + meter.histogramBuilder(eventName) + .ofLongs() + .setUnit(units) + .build().record(value, ctx.getPopulatedAttributesBuilder(attributesBuilder) + .put("labelName", eventName) + .build()); + } + + public void meterHistogramMillis(String eventName, AttributesBuilder attributesBuilder) { + meterHistogram(eventName, "ms", Duration.between(getContext().getStartTime(), Instant.now()).toMillis(), + attributesBuilder); + } + + public void meterHistogramMicros(String eventName, AttributesBuilder attributesBuilder) { + meterHistogram(eventName, "us", + Duration.between(getContext().getStartTime(), Instant.now()).toNanos()*1000, attributesBuilder); + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index f7cfa5f88..1df57d35c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -3,7 +3,6 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.context.Context; @@ -22,7 +21,6 @@ import lombok.NonNull; import java.time.Duration; -import java.time.Instant; import java.util.Optional; import java.util.concurrent.TimeUnit; @@ -100,93 +98,40 @@ public IInstrumentationAttributes getEnclosingScope() { return null; } + OpenTelemetry getOpenTelemetry() { + return 
openTelemetryImpl; + } + @Override @NonNull public IInstrumentConstructor getRootInstrumentationScope() { return this; } + public MeteringClosure buildSimpleMeter(IInstrumentationAttributes ctx) { + return new MeteringClosure(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); + } + + public MeteringClosureForStartTimes buildMeter(IWithStartTimeAndAttributes ctx) { + return new MeteringClosureForStartTimes(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); + } + @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; // nothing more to do } - public static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan) { + private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan) { return Optional.ofNullable(parentSpan).map(p -> builder.setParent(Context.current().with(p))) .orElseGet(builder::setNoParent) .startSpan().setAllAttributes(attrs); } @Override - public Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName) { + public Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, + AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); - return buildSpanWithParent(spanBuilder, getPopulatedAttributes(), parentSpan); - } - - public Span buildSpanWithoutParent(String scopeName, String spanName) { - var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); - return buildSpanWithParent(spanBuilder, getPopulatedAttributes(), null); - } - - public SimpleMeteringClosure buildMeter(IInstrumentationAttributes ctx) { - return new SimpleMeteringClosure(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); - } - - OpenTelemetry getOpenTelemetry() { - return openTelemetryImpl; - } - - public void meterIncrementEvent(Meter meter, IInstrumentationAttributes ctx, String eventName) { - meterIncrementEvent(meter, ctx, eventName, 1); - } - - public void meterIncrementEvent(Meter meter, IInstrumentationAttributes ctx, String eventName, long increment) { - meter.counterBuilder(eventName) - .build().add(increment, ctx.getPopulatedAttributesBuilder() - .put("labelName", eventName) - .build()); - } - - public void meterDeltaEvent(Meter meter, IInstrumentationAttributes ctx, String eventName, long delta) { - if (ctx == null) { - return; - } - meter.upDownCounterBuilder(eventName) - .build().add(delta, ctx.getPopulatedAttributesBuilder() - .put("labelName", eventName) - .build()); - } - - public - void meterHistogramMillis(Meter meter, T ctx, String eventName) { - meterHistogram(meter, ctx, eventName, "ms", - Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); - } - - public - void meterHistogramMicros(Meter meter, T ctx, String eventName) { - meterHistogram(meter, ctx, eventName, "us", - Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); - } - - public void meterHistogramMillis(Meter meter, IInstrumentationAttributes ctx, String eventName, Duration between) { - meterHistogram(meter, ctx, eventName, "ms", between.toMillis()); - } - - public void meterHistogramMicros(Meter meter, IInstrumentationAttributes ctx, String eventName, Duration between) { - meterHistogram(meter, ctx, eventName, "us", between.toNanos()*1000); - } - - public void meterHistogram(Meter meter, IInstrumentationAttributes ctx, String eventName, String units, long value) { - if (ctx == null) { - return; - 
} - meter.histogramBuilder(eventName) - .ofLongs() - .setUnit(units) - .build().record(value, ctx.getPopulatedAttributesBuilder() - .put("labelName", eventName) - .build()); + return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java deleted file mode 100644 index d73cf9cf3..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/SimpleMeteringClosure.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.metrics.Meter; -import lombok.AllArgsConstructor; - -import java.time.Duration; -import java.time.Instant; - -@AllArgsConstructor -public class SimpleMeteringClosure { - public final T ctx; - public final Meter meter; - - public void meterIncrementEvent(String eventName) { - meterIncrementEvent(eventName, 1); - } - - public void meterIncrementEvent(String eventName, long increment) { - if (ctx == null) { - return; - } - meter.counterBuilder(eventName) - .build().add(increment, ctx.getPopulatedAttributesBuilder() - .put("labelName", eventName) - .build()); - } - - public void meterDeltaEvent(String eventName, long delta) { - meter.upDownCounterBuilder(eventName) - .build().add(delta, ctx.getPopulatedAttributesBuilder() - .put("labelName", eventName) - .build()); - } - - public void meterHistogramMicros(String eventName, Duration between) { - meterHistogram(eventName, "us", between.toNanos()*1000); - } - - public void meterHistogramMillis(String eventName, Duration between) { - meterHistogram(eventName, "ms", between.toMillis()); - } - - public void meterHistogram(String eventName, String units, long value) { - if (ctx == null) { - return; - } - meter.histogramBuilder(eventName) - .ofLongs() - .setUnit(units) - .build().record(value, ctx.getPopulatedAttributesBuilder() - .put("labelName", eventName) - .build()); - } - - public void meterHistogramMillis(String eventName) { - meterHistogram(eventName, "ms", - Duration.between(ctx.getStartTime(), Instant.now()).toMillis()); - } - - public void meterHistogramMicros(String eventName) { - meterHistogram(eventName, "us", - Duration.between(ctx.getStartTime(), Instant.now()).toNanos()*1000); - } - -} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index 76c7c3b0b..26573d17b 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -7,6 +7,7 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; @@ -17,13 +18,13 @@ public class ConditionallyReliableLoggingHttpHandler extends LoggingHttpHandler { private final Predicate shouldBlockPredicate; - 
public ConditionallyReliableLoggingHttpHandler(@NonNull IInstrumentConstructor contextConstructor, + public ConditionallyReliableLoggingHttpHandler(@NonNull RootOtelContext rootContext, @NonNull String nodeId, String connectionId, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate requestCapturePredicate, @NonNull Predicate headerPredicateForWhenToBlock) throws IOException { - super(contextConstructor, nodeId, connectionId, trafficOffloaderFactory, requestCapturePredicate); + super(rootContext, nodeId, connectionId, trafficOffloaderFactory, requestCapturePredicate); this.shouldBlockPredicate = headerPredicateForWhenToBlock; } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 2c2d7409a..663cdde92 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -22,8 +22,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; @@ -139,11 +138,11 @@ public HttpRequest resetCurrentRequest() { protected HttpMessageContext messageContext; - public LoggingHttpHandler(@NonNull IInstrumentConstructor contextConstructor, String nodeId, String channelKey, + public LoggingHttpHandler(@NonNull RootOtelContext rootContext, String nodeId, String channelKey, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate httpHeadersCapturePredicate) throws IOException { - var parentContext = new ConnectionContext(contextConstructor, channelKey, nodeId); + var parentContext = new ConnectionContext(rootContext, channelKey, nodeId); this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.HttpTransactionState.REQUEST); messageContext.meterIncrementEvent("requestStarted"); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index c60dd175d..76a2313e5 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -18,7 +18,6 @@ import org.apache.logging.log4j.core.util.NullOutputStream; import org.opensearch.common.settings.Settings; import org.opensearch.migrations.tracing.RootOtelContext; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import 
org.opensearch.migrations.trafficcapture.FileConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/BacksideConnectionPool.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/BacksideConnectionPool.java index 95c1ad115..b1bf526cb 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/BacksideConnectionPool.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/BacksideConnectionPool.java @@ -1,8 +1,6 @@ package org.opensearch.migrations.trafficcapture.proxyserver.netty; import io.netty.channel.socket.nio.NioSocketChannel; -import io.netty.handler.logging.LogLevel; -import org.opensearch.migrations.coreutils.MetricsLogger; import org.slf4j.event.Level; import io.netty.bootstrap.Bootstrap; import io.netty.channel.ChannelDuplexHandler; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java index 4325a27f1..38e428e5e 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java @@ -9,6 +9,7 @@ import io.netty.util.concurrent.DefaultThreadFactory; import lombok.NonNull; import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; @@ -29,7 +30,7 @@ public int getProxyPort() { return proxyPort; } - public void start(IInstrumentConstructor rootContext, + public void start(RootOtelContext rootContext, BacksideConnectionPool backsideConnectionPool, int numThreads, Supplier sslEngineSupplier, diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 4dc9cabed..4f4976b5c 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -7,6 +7,7 @@ import io.netty.handler.ssl.SslHandler; import lombok.NonNull; import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpHandler; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; @@ -18,16 +19,16 @@ public class ProxyChannelInitializer extends 
ChannelInitializer { private final IConnectionCaptureFactory connectionCaptureFactory; private final Supplier sslEngineProvider; - private final IInstrumentConstructor instrumentationConstructor; + private final RootOtelContext rootContext; private final BacksideConnectionPool backsideConnectionPool; private final RequestCapturePredicate requestCapturePredicate; - public ProxyChannelInitializer(IInstrumentConstructor instrumentationConstructor, + public ProxyChannelInitializer(RootOtelContext rootContext, BacksideConnectionPool backsideConnectionPool, Supplier sslEngineSupplier, IConnectionCaptureFactory connectionCaptureFactory, @NonNull RequestCapturePredicate requestCapturePredicate) { - this.instrumentationConstructor = instrumentationConstructor; + this.rootContext = rootContext; this.backsideConnectionPool = backsideConnectionPool; this.sslEngineProvider = sslEngineSupplier; this.connectionCaptureFactory = connectionCaptureFactory; @@ -50,7 +51,7 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler(instrumentationConstructor, + ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler(rootContext, "", connectionId, connectionCaptureFactory, requestCapturePredicate, this::shouldGuaranteeMessageOffloading)); ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java index 99cf9d531..2da589b91 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Accumulation.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.replay; import lombok.NonNull; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index b82a3b4dc..9c0dff1cb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -18,7 +18,6 @@ import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 510318e01..04ba81123 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -18,7 +18,6 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.FileInputStream; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index cdf3ac649..c23f11444 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -7,7 +7,6 @@ import org.opensearch.migrations.tracing.AbstractNestedSpanContext; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IWithStartTime; import org.opensearch.migrations.tracing.IndirectNestedSpanContext; import java.time.Duration; @@ -18,7 +17,7 @@ public class ReplayContexts { private ReplayContexts() {} public static class ChannelKeyContext extends AbstractNestedSpanContext - implements IReplayContexts.IChannelKeyContext, IWithStartTime { + implements IReplayContexts.IChannelKeyContext { @Getter final ISourceTrafficChannelKey channelKey; diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index c6c9bc1c1..30460b6a9 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -4,7 +4,6 @@ import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.SimpleMeteringClosure; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; From 8ef03766a859cbbd4ff029458274d30c24a40b5b Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 4 Jan 2024 12:31:27 -0500 Subject: [PATCH 45/94] Fix mend security issue for json-path CVE by updating opensearch-security to 2.11.1.0 Signed-off-by: Greg Schohn --- TrafficCapture/trafficCaptureProxyServer/build.gradle | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index 81d2281dd..140fafcd1 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -15,8 +15,11 @@ configurations { dependencies { implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") - implementation 'org.opensearch.plugin:opensearch-security:2.6.0.0' - opensearchSecurityPlugin 'org.opensearch.plugin:opensearch-security:2.6.0.0' + implementation 
'org.opensearch.plugin:opensearch-security:2.11.1.0' + implementation 'org.opensearch:opensearch-common:2.11.0' + implementation 'org.opensearch:opensearch-core:2.11.0' + implementation 'org.opensearch:opensearch:2.11.0' + opensearchSecurityPlugin 'org.opensearch.plugin:opensearch-security:2.11.1.0' implementation files(zipTree("$configurations.opensearchSecurityPlugin.singleFile").matching { include "*.jar" exclude "slf*.jar" @@ -28,10 +31,6 @@ dependencies { implementation project(':coreUtilities') implementation group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' - implementation 'org.opensearch:opensearch-common:2.6.0' - implementation 'org.opensearch:opensearch-core:2.6.0' - implementation 'org.opensearch:opensearch:2.8.0' - implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' implementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' From 37ae5488c44e5c6c08f4bc0ec9421633e7c7de20 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 4 Jan 2024 15:45:11 -0500 Subject: [PATCH 46/94] Remove zipkin as a tracing sink. I've personally been using jaeger more and zipkin has been crashing with an OOM on startup (probably due to my docker setup) and I suspect that it has been causing greater problems with the otel collector. Signed-off-by: Greg Schohn --- .../dockerSolution/src/main/docker/docker-compose.yml | 9 --------- .../src/main/docker/otel-collector-config-demo.yaml | 6 +----- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index e588aa0c9..bc0d53d96 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -24,14 +24,6 @@ services: environment: - COLLECTOR_OTLP_ENABLED=true - # Zipkin - zipkin: - image: openzipkin/zipkin:latest - networks: - - migrations - ports: - - "9411:9411" - # Collector otel-collector: image: otel/opentelemetry-collector:latest @@ -50,7 +42,6 @@ services: - "4317:4317" # otlp receiver depends_on: - jaeger - - zipkin zookeeper: image: docker.io/bitnami/zookeeper:3.8 diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml index ac9a2a6d5..5fe680c43 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml @@ -11,10 +11,6 @@ exporters: logging: loglevel: debug - zipkin: - endpoint: "http://zipkin:9411/api/v2/spans" - format: proto - otlp/jaeger: # Jaeger supports OTLP directly. The default port for OTLP/gRPC is 4317 endpoint: jaeger:4317 tls: @@ -33,7 +29,7 @@ service: traces: receivers: [otlp] processors: [] - exporters: [logging, zipkin, otlp/jaeger] + exporters: [logging, otlp/jaeger] metrics: receivers: [otlp] exporters: [logging, prometheus] From 1172912eb101399cba335ee0831d8720bffd60d9 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 5 Jan 2024 15:40:35 -0500 Subject: [PATCH 47/94] Make attribute name filtering more generic and fix a bug in negation so that the connectionId is now emitted from the activeConnections metric. 
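For illustration, a minimal sketch of how a negatable attribute-name matcher can behave; the class and method names below are hypothetical and are not the project's exact API:

    import io.opentelemetry.api.common.AttributeKey;
    import java.util.Set;
    import java.util.function.Predicate;

    final class NameMatcherSketch {
        // Matches keys whose names are in the given set; when negate is true,
        // it matches every key except the named ones instead.
        static Predicate<AttributeKey<?>> matching(boolean negate, Set<String> names) {
            return key -> names.contains(key.getKey()) ^ negate;
        }

        public static void main(String[] args) {
            var keepOnlyConnectionId = matching(true, Set.of("connectionId"));
            System.out.println(keepOnlyConnectionId.test(AttributeKey.stringKey("connectionId"))); // false
            System.out.println(keepOnlyConnectionId.test(AttributeKey.stringKey("nodeId")));       // true
        }
    }

Used as an exclude predicate, the negated form above would drop every attribute except connectionId.
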
Signed-off-by: Greg Schohn --- .../tracing/ConnectionContext.java | 10 ++--- .../AttributeNameMatchingPredicate.java | 42 +++++++++++++++++++ .../tracing/FilteringAttributeBuilder.java | 17 +++----- 3 files changed, 53 insertions(+), 16 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 6669006d5..96cb6842f 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -3,14 +3,14 @@ import io.opentelemetry.api.common.Attributes; import lombok.Getter; import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import org.opensearch.migrations.tracing.AttributeNameMatchingPredicate; import org.opensearch.migrations.tracing.FilteringAttributeBuilder; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import java.util.Set; - public class ConnectionContext extends AbstractNestedSpanContext implements IConnectionContext { - private static final Set KEYS_TO_ALLOW_FOR_ACTIVE_CONNECTION_COUNT = Set.of(CONNECTION_ID_ATTR.getKey()); + private static final AttributeNameMatchingPredicate KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT = + AttributeNameMatchingPredicate.builder(true).add(CONNECTION_ID_ATTR.getKey()).build(); public static final String ACTIVE_CONNECTION = "activeConnection"; @Getter @@ -28,13 +28,13 @@ public ConnectionContext(RootOtelContext rootInstrumentationScope, this.nodeId = nodeId; initializeSpan(); meterDeltaEvent(ACTIVE_CONNECTION, 1, - new FilteringAttributeBuilder(Attributes.builder(), false, KEYS_TO_ALLOW_FOR_ACTIVE_CONNECTION_COUNT)); + new FilteringAttributeBuilder(Attributes.builder(), KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT)); } @Override public void sendMeterEventsForEnd() { super.sendMeterEventsForEnd(); meterDeltaEvent(ACTIVE_CONNECTION, -1, - new FilteringAttributeBuilder(Attributes.builder(), false, KEYS_TO_ALLOW_FOR_ACTIVE_CONNECTION_COUNT)); + new FilteringAttributeBuilder(Attributes.builder(), KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT)); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java new file mode 100644 index 000000000..e1438e279 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java @@ -0,0 +1,42 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.AttributeKey; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; + +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +public class AttributeNameMatchingPredicate implements Predicate { + private final boolean negate; + private final Set keysToMatch; + + public static class Builder { + private final Set namesSet = new HashSet<>(); + private final boolean negate; + public Builder(boolean negate) { + this.negate = negate; + 
} + public Builder add(String name) { + namesSet.add(name); + return this; + } + public AttributeNameMatchingPredicate build() { + return new AttributeNameMatchingPredicate(negate, namesSet); + } + } + + public static Builder builder(boolean negate) { + return new Builder(negate); + } + + private AttributeNameMatchingPredicate(boolean negate, Set keysToMatch) { + this.negate = negate; + this.keysToMatch = keysToMatch; + } + + @Override + public boolean test(AttributeKey attribute) { + return keysToMatch.contains(attribute.getKey()) == negate; + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java index 4e2bbb211..25ad04405 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java @@ -3,25 +3,20 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; -import lombok.AllArgsConstructor; import lombok.Getter; import java.util.Collections; -import java.util.HashSet; import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; @Getter public class FilteringAttributeBuilder implements AttributesBuilder { private AttributesBuilder underlyingBuilder; - public final boolean matchExcludes; - public final Set keysToMatch; + private final Predicate excludePredicate; - public FilteringAttributeBuilder(AttributesBuilder underlyingBuilder, boolean matchesExclude, - Set keysToMatch) { + public FilteringAttributeBuilder(AttributesBuilder underlyingBuilder, Predicate excludePredicate) { this.underlyingBuilder = underlyingBuilder; - this.matchExcludes = matchesExclude; - this.keysToMatch = Collections.unmodifiableSet(keysToMatch); + this.excludePredicate = excludePredicate; } @Override @@ -31,7 +26,7 @@ public Attributes build() { @Override public AttributesBuilder put(AttributeKey key, int value) { - if (keysToMatch.contains(key.getKey()) == matchExcludes) { + if (excludePredicate.test(key)) { return this; } underlyingBuilder = underlyingBuilder.put(key, value); @@ -40,7 +35,7 @@ public AttributesBuilder put(AttributeKey key, int value) { @Override public AttributesBuilder put(AttributeKey key, T value) { - if (keysToMatch.contains(key.getKey()) == matchExcludes) { + if (excludePredicate.test(key)) { return this; } underlyingBuilder = underlyingBuilder.put(key, value); From 22296b7e3b3a15bdd9c08ea04bf4ed27ba85c0e9 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sun, 7 Jan 2024 20:46:03 -0500 Subject: [PATCH 48/94] Minor tweaks to the otel collector (including renaming from 'demo') and adding some TODOs for future research. 
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 1 + .../tracing/FilteringAttributeBuilder.java | 3 +++ .../tracing/IScopedInstrumentationAttributes.java | 4 ++++ .../src/main/docker/docker-compose.yml | 6 +++--- ...-config-demo.yaml => otel-collector-config.yaml} | 13 ++++++++++--- .../http/HttpJsonTransformingConsumer.java | 1 + 6 files changed, 22 insertions(+), 6 deletions(-) rename TrafficCapture/dockerSolution/src/main/docker/{otel-collector-config-demo.yaml => otel-collector-config.yaml} (72%) diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 4778664f6..4ddc646a7 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -78,6 +78,7 @@ class StreamManager extends OrderedStreamLifecyleManager { Instant startTime; public StreamManager(IConnectionContext ctx, String connectionId) { + // TODO - add https://opentelemetry.io/blog/2022/instrument-kafka-clients/ this.telemetryContext = ctx; ctx.meterIncrementEvent("offloader_created"); telemetryContext.meterDeltaEvent("offloaders_active", 1); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java index 25ad04405..5dd39279b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java @@ -9,6 +9,9 @@ import java.util.Set; import java.util.function.Predicate; +/** + * The use-case of filtering attributes in instruments might be better to implement via views. 
+ */ @Getter public class FilteringAttributeBuilder implements AttributesBuilder { private AttributesBuilder underlyingBuilder; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index e40cf9c9d..135eee504 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -30,4 +30,8 @@ default void close() { endSpan(); sendMeterEventsForEnd(); } + + default void addException(Exception e) { + getCurrentSpan().recordException(e); + } } diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index bc0d53d96..75a1a4d3e 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -27,12 +27,12 @@ services: # Collector otel-collector: image: otel/opentelemetry-collector:latest -# command: ["--config=/etc/otel-collector-config-demo.yaml", "${OTELCOL_ARGS}"] +# command: ["--config=/etc/otel-collector-config.yaml", "${OTELCOL_ARGS}"] networks: - migrations volumes: -# - ./otel-collector-config-demo.yaml:/etc/otel-collector-config-demo.yaml - - ./otel-collector-config-demo.yaml:/etc/otelcol/config.yaml +# - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-config.yaml:/etc/otelcol/config.yaml ports: - "1888:1888" # pprof extension - "8888:8888" # Prometheus metrics exposed by the collector diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml similarity index 72% rename from TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml rename to TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml index 5fe680c43..0e95469c0 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config-demo.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml @@ -3,6 +3,12 @@ receivers: protocols: grpc: +processors: + batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 + exporters: prometheus: endpoint: "0.0.0.0:8889" @@ -28,8 +34,9 @@ service: pipelines: traces: receivers: [otlp] - processors: [] - exporters: [logging, otlp/jaeger] + processors: [batch] + exporters: [otlp/jaeger] metrics: receivers: [otlp] - exporters: [logging, prometheus] + processors: [batch] + exporters: [prometheus] diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index a7af34fb9..5ede26f06 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -116,6 +116,7 @@ public DiagnosticTrackableCompletableFuture Date: Sun, 7 Jan 2024 22:01:10 -0500 Subject: [PATCH 49/94] Set the aggregation temporality to delta 
rather than cumulative. I still need to continue to knock the dimensionality of the data (unique attributes) down considerably, but this at least mitigates the grpc overflow errors that I was seeing. Signed-off-by: Greg Schohn --- .../migrations/tracing/RootOtelContext.java | 62 ++++++++----------- 1 file changed, 26 insertions(+), 36 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 1df57d35c..03ed7f2ad 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -13,6 +13,7 @@ import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; @@ -32,42 +33,31 @@ public static OpenTelemetry initializeOpenTelemetry(String serviceName, String c .put(ResourceAttributes.SERVICE_NAME, serviceName) .build(); - var openTelemetrySdk = - OpenTelemetrySdk.builder() - .setLoggerProvider( - SdkLoggerProvider.builder() - .setResource(serviceResource) - .addLogRecordProcessor( - BatchLogRecordProcessor.builder( - OtlpGrpcLogRecordExporter.builder() - .setEndpoint(collectorEndpoint) - .build()) - .build()) - .build()) - .setTracerProvider( - SdkTracerProvider.builder() - .setResource(serviceResource) - .addSpanProcessor( - BatchSpanProcessor.builder( - OtlpGrpcSpanExporter.builder() - .setEndpoint(collectorEndpoint) - .setTimeout(2, TimeUnit.SECONDS) - .build()) - .setScheduleDelay(100, TimeUnit.MILLISECONDS) - .build()) - .build()) - .setMeterProvider( - SdkMeterProvider.builder() - .setResource(serviceResource) - .registerMetricReader( - PeriodicMetricReader.builder( - OtlpGrpcMetricExporter.builder() - .setEndpoint(collectorEndpoint) - .build()) - .setInterval(Duration.ofMillis(1000)) - .build()) - .build()) - .build(); + final var spanProcessor = BatchSpanProcessor.builder(OtlpGrpcSpanExporter.builder() + .setEndpoint(collectorEndpoint) + .setTimeout(2, TimeUnit.SECONDS) + .build()) + .setScheduleDelay(100, TimeUnit.MILLISECONDS) + .build(); + final var metricReader = PeriodicMetricReader.builder(OtlpGrpcMetricExporter.builder() + .setEndpoint(collectorEndpoint) + .setAggregationTemporalitySelector(AggregationTemporalitySelector.deltaPreferred()) + .build()) + .setInterval(Duration.ofMillis(1000)) + .build(); + final var logProcessor = BatchLogRecordProcessor.builder(OtlpGrpcLogRecordExporter.builder() + .setEndpoint(collectorEndpoint) + .build()) + .build(); + + var openTelemetrySdk = OpenTelemetrySdk.builder() + .setTracerProvider(SdkTracerProvider.builder().setResource(serviceResource) + .addSpanProcessor(spanProcessor).build()) + .setMeterProvider(SdkMeterProvider.builder().setResource(serviceResource) + .registerMetricReader(metricReader).build()) + .setLoggerProvider(SdkLoggerProvider.builder().setResource(serviceResource) + .addLogRecordProcessor(logProcessor).build()) + .build(); // Add hook to close SDK, which flushes logs Runtime.getRuntime().addShutdownHook(new Thread(openTelemetrySdk::close)); From 
fdd8141eb3eae0170c997a199e2bbc2c3ae97f0a Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 8 Jan 2024 08:20:42 -0500 Subject: [PATCH 50/94] Wrap all metric emissions within the context's span so that the metric can be emitted with the span data as its exemplar. Signed-off-by: Greg Schohn --- .../migrations/tracing/MeteringClosure.java | 27 ++++++++++++------- .../tracing/MeteringClosureForStartTimes.java | 16 ++++++----- .../tracing/NullableExemplarScope.java | 20 ++++++++++++++ 3 files changed, 47 insertions(+), 16 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/NullableExemplarScope.java diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java index cc13e1120..322a91e09 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java @@ -2,11 +2,13 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; -import java.time.Duration; -import java.time.Instant; +@Slf4j @AllArgsConstructor public class MeteringClosure { public final IInstrumentationAttributes ctx; @@ -20,16 +22,21 @@ public void meterIncrementEvent(String eventName, long increment, AttributesBuil if (ctx == null) { return; } - meter.counterBuilder(eventName) - .build().add(increment, ctx.getPopulatedAttributesBuilder(attributesBuilder) - .put("labelName", eventName) - .build()); + try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { + meter.counterBuilder(eventName) + .build().add(increment, ctx.getPopulatedAttributesBuilder(attributesBuilder) + .put("labelName", eventName) + .build()); + } } public void meterDeltaEvent(String eventName, long delta, AttributesBuilder attributesBuilder) { - meter.upDownCounterBuilder(eventName) - .build().add(delta, ctx.getPopulatedAttributesBuilder(attributesBuilder) - .put("labelName", eventName) - .build()); + try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { + log.warn("Log with or without trace context?"); + meter.upDownCounterBuilder(eventName) + .build().add(delta, ctx.getPopulatedAttributesBuilder(attributesBuilder) + .put("labelName", eventName) + .build()); + } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java index fcd85bca8..7bba1b207 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java @@ -2,6 +2,8 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; import java.time.Duration; import java.time.Instant; @@ -28,12 +30,14 @@ public void meterHistogram(String eventName, String units, long value, Attribute if (ctx == null) { return; } - meter.histogramBuilder(eventName) - .ofLongs() - .setUnit(units) - 
.build().record(value, ctx.getPopulatedAttributesBuilder(attributesBuilder) - .put("labelName", eventName) - .build()); + try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { + meter.histogramBuilder(eventName) + .ofLongs() + .setUnit(units) + .build().record(value, ctx.getPopulatedAttributesBuilder(attributesBuilder) + .put("labelName", eventName) + .build()); + } } public void meterHistogramMillis(String eventName, AttributesBuilder attributesBuilder) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/NullableExemplarScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/NullableExemplarScope.java new file mode 100644 index 000000000..f84c3f802 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/NullableExemplarScope.java @@ -0,0 +1,20 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; + +public class NullableExemplarScope implements Scope { + final Scope underlyingScope; + + public NullableExemplarScope(Span span) { + underlyingScope = span == null ? null : Context.current().with(span).makeCurrent(); + } + + @Override + public void close() { + if (underlyingScope != null) { + underlyingScope.close(); + } + } +} From 490521d4834b1f4b76ddc53d7601f4d27f528e7c Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 9 Jan 2024 08:46:28 -0500 Subject: [PATCH 51/94] In progress changes. I'm trying to track down a regression and want to preserve new work first. Signed-off-by: Greg Schohn --- .../KafkaCaptureFactoryTest.java | 5 ++-- .../tracing/ConnectionContext.java | 7 +++-- .../tracing/IRootOffloaderContext.java | 9 ++++++ .../tracing/RootOffloaderContext.java | 21 ++++++++++++++ .../migrations/tracing/IRootOtelContext.java | 7 +++++ .../migrations/tracing/MeteringClosure.java | 9 ++++-- .../migrations/tracing/RootOtelContext.java | 28 +++++++++++++------ .../src/main/docker/docker-compose.yml | 3 ++ ...nditionallyReliableLoggingHttpHandler.java | 3 +- .../netty/LoggingHttpHandler.java | 3 +- .../tracing/IRootWireLoggingContext.java | 8 ++++++ .../netty/tracing/RootWireLoggingContext.java | 19 +++++++++++++ ...ionallyReliableLoggingHttpHandlerTest.java | 17 ++++++----- .../proxyserver/CaptureProxy.java | 4 ++- .../netty/NettyScanningHttpProxy.java | 3 +- .../netty/ProxyChannelInitializer.java | 6 ++-- .../netty/NettyScanningHttpProxyTest.java | 4 ++- ...afficToHttpTransactionAccumulatorTest.java | 9 +++++- 18 files changed, 135 insertions(+), 30 deletions(-) create mode 100644 TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java create mode 100644 TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/RootOffloaderContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java create mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java create mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java 
b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 084b386f5..52c93e635 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -22,6 +22,8 @@ import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; +import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -45,7 +47,6 @@ public class KafkaCaptureFactoryTest { private String connectionId = "0242c0fffea82008-0000000a-00000003-62993a3207f92af6-9093ce33"; private String topic = "test_topic"; - @Test public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, ExecutionException, InterruptedException { final var referenceTimestamp = Instant.now(Clock.systemUTC()); @@ -77,7 +78,7 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, } private static ConnectionContext createCtx() { - return new ConnectionContext(new RootOtelContext(), "test", "test"); + return new ConnectionContext(new RootOffloaderContext(null), "test", "test"); } /** diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 96cb6842f..9cb3b253a 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -5,10 +5,11 @@ import org.opensearch.migrations.tracing.AbstractNestedSpanContext; import org.opensearch.migrations.tracing.AttributeNameMatchingPredicate; import org.opensearch.migrations.tracing.FilteringAttributeBuilder; +import org.opensearch.migrations.tracing.IRootOtelContext; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -public class ConnectionContext extends AbstractNestedSpanContext implements IConnectionContext { +public class ConnectionContext extends AbstractNestedSpanContext implements IConnectionContext { private static final AttributeNameMatchingPredicate KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT = AttributeNameMatchingPredicate.builder(true).add(CONNECTION_ID_ATTR.getKey()).build(); public static final String ACTIVE_CONNECTION = "activeConnection"; @@ -21,12 +22,12 @@ public class ConnectionContext extends AbstractNestedSpanContext initializeOpenTelemetryForCollector(endpoint, serviceName)) + .orElse(OpenTelemetrySdk.builder().build()); + } + + public RootOtelContext() { - this(null, null); + this(null); } public RootOtelContext(String collectorEndpoint, String serviceName) { - this(Optional.ofNullable(collectorEndpoint) - .map(endpoint-> initializeOpenTelemetry(serviceName, endpoint)) - .orElse(OpenTelemetrySdk.builder().build())); + this(initializeOpenTelemetry(collectorEndpoint, 
serviceName)); } public RootOtelContext(OpenTelemetry sdk) { - openTelemetryImpl = sdk; + openTelemetryImpl = sdk != null ? sdk : initializeOpenTelemetry(null, null); } @Override @@ -98,8 +105,13 @@ public IInstrumentConstructor getRootInstrumentationScope() { return this; } + @Override + public Meter getMeterForScope(String scopeName) { + return getOpenTelemetry().getMeter(scopeName); + } + public MeteringClosure buildSimpleMeter(IInstrumentationAttributes ctx) { - return new MeteringClosure(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); + return new MeteringClosure(ctx, getMeterForScope(ctx.getScopeName())); } public MeteringClosureForStartTimes buildMeter(IWithStartTimeAndAttributes ctx) { diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index 75a1a4d3e..e08256cd1 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -11,6 +11,9 @@ services: - ./prometheus.yaml:/etc/prometheus/prometheus.yml ports: - "9090:9090" + command: + - '--config.file=/etc/prometheus/prometheus.yml' + - '--enable-feature=exemplar-storage' # Jaeger jaeger: diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index 26573d17b..c12fda546 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -10,6 +10,7 @@ import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; import java.io.IOException; import java.util.function.Predicate; @@ -18,7 +19,7 @@ public class ConditionallyReliableLoggingHttpHandler extends LoggingHttpHandler { private final Predicate shouldBlockPredicate; - public ConditionallyReliableLoggingHttpHandler(@NonNull RootOtelContext rootContext, + public ConditionallyReliableLoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, @NonNull String nodeId, String connectionId, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate requestCapturePredicate, diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 663cdde92..f4d13ecdc 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -27,6 +27,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; +import 
org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; @@ -138,7 +139,7 @@ public HttpRequest resetCurrentRequest() { protected HttpMessageContext messageContext; - public LoggingHttpHandler(@NonNull RootOtelContext rootContext, String nodeId, String channelKey, + public LoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, String nodeId, String channelKey, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate httpHeadersCapturePredicate) throws IOException { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java new file mode 100644 index 000000000..db9570049 --- /dev/null +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java @@ -0,0 +1,8 @@ +package org.opensearch.migrations.trafficcapture.netty.tracing; + +import io.opentelemetry.api.metrics.Meter; +import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; + +public interface IRootWireLoggingContext extends IRootOffloaderContext { + +} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java new file mode 100644 index 000000000..e18aad136 --- /dev/null +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java @@ -0,0 +1,19 @@ +package org.opensearch.migrations.trafficcapture.netty.tracing; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; +import lombok.Getter; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; + +public class RootWireLoggingContext extends RootOffloaderContext implements IRootWireLoggingContext { + public static final String SCOPE_NAME = "NettyCapture"; + @Getter + Meter wireLoggingMeter; + + public RootWireLoggingContext(OpenTelemetry openTelemetry) { + super(openTelemetry); + wireLoggingMeter = super.getMeterForScope(SCOPE_NAME); + } +} diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java index a49183d6b..9a829c8b7 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java @@ -4,8 +4,6 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; -import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.trace.Tracer; import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension; import 
lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -17,11 +15,11 @@ import org.junit.jupiter.params.provider.ValueSource; import org.opensearch.migrations.testutils.TestUtilities; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamAndByteBufferWrapper; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; +import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -47,6 +45,12 @@ public class ConditionallyReliableLoggingHttpHandlerTest { @RegisterExtension static final OpenTelemetryExtension otelTesting = OpenTelemetryExtension.create(); + private static class TestRootContext extends RootWireLoggingContext { + public TestRootContext() { + super(otelTesting.getOpenTelemetry()); + } + } + static class TestStreamManager extends OrderedStreamLifecyleManager implements AutoCloseable { AtomicReference byteBufferAtomicReference = new AtomicReference<>(); AtomicInteger flushCount = new AtomicInteger(); @@ -78,10 +82,9 @@ public CodedOutputStreamAndByteBufferWrapper createStream() { } } - private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer channelWriter) throws IOException { - var rootInstrumenter = new RootOtelContext(otelTesting.getOpenTelemetry()); + var rootInstrumenter = new TestRootContext(); var streamManager = new TestStreamManager(); var offloader = new StreamChannelConnectionCaptureSerializer("Test", "c", streamManager); @@ -155,7 +158,7 @@ private static Consumer getSingleByteAtATimeWriter(boolean useP @Test @ValueSource(booleans = {false, true}) public void testThatSuppressedCaptureWorks() throws Exception { - var rootInstrumenter = new RootOtelContext(); + var rootInstrumenter = new TestRootContext(); var streamMgr = new TestStreamManager(); var offloader = new StreamChannelConnectionCaptureSerializer("Test", "connection", streamMgr); @@ -180,7 +183,7 @@ public void testThatSuppressedCaptureWorks() throws Exception { @ParameterizedTest @ValueSource(booleans = {false, true}) public void testThatHealthCheckCaptureCanBeSuppressed(boolean singleBytes) throws Exception { - var rootInstrumenter = new RootOtelContext(); + var rootInstrumenter = new TestRootContext(); var streamMgr = new TestStreamManager(); var offloader = new StreamChannelConnectionCaptureSerializer("Test", "connection", streamMgr); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 76a2313e5..9cf36c843 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -25,6 +25,7 @@ import org.opensearch.migrations.trafficcapture.StreamLifecycleManager; import org.opensearch.migrations.trafficcapture.kafkaoffloader.KafkaCaptureFactory; import 
org.opensearch.migrations.trafficcapture.netty.HeaderValueFilteringCapturePredicate; +import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; import org.opensearch.migrations.trafficcapture.proxyserver.netty.BacksideConnectionPool; import org.opensearch.migrations.trafficcapture.proxyserver.netty.NettyScanningHttpProxy; import org.opensearch.security.ssl.DefaultSecurityKeyStore; @@ -303,7 +304,8 @@ public static void main(String[] args) throws InterruptedException, IOException var params = parseArgs(args); var backsideUri = convertStringToUri(params.backsideUriString); - var rootContext = new RootOtelContext(params.otelCollectorEndpoint, "capture"); + var rootContext = new RootWireLoggingContext( + RootOtelContext.initializeOpenTelemetry(params.otelCollectorEndpoint, "capture")); var sksOp = Optional.ofNullable(params.sslConfigFilePath) .map(sslConfigFile->new DefaultSecurityKeyStore(getSettings(sslConfigFile), diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java index 38e428e5e..0602c8057 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java @@ -12,6 +12,7 @@ import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; +import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; import javax.net.ssl.SSLEngine; import java.util.function.Supplier; @@ -30,7 +31,7 @@ public int getProxyPort() { return proxyPort; } - public void start(RootOtelContext rootContext, + public void start(IRootWireLoggingContext rootContext, BacksideConnectionPool backsideConnectionPool, int numThreads, Supplier sslEngineSupplier, diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index 4f4976b5c..a44412ff3 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -11,6 +11,8 @@ import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpHandler; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; +import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; import javax.net.ssl.SSLEngine; import java.io.IOException; @@ -19,11 +21,11 @@ public class ProxyChannelInitializer extends ChannelInitializer { private final IConnectionCaptureFactory connectionCaptureFactory; private final Supplier 
sslEngineProvider; - private final RootOtelContext rootContext; + private final IRootWireLoggingContext rootContext; private final BacksideConnectionPool backsideConnectionPool; private final RequestCapturePredicate requestCapturePredicate; - public ProxyChannelInitializer(RootOtelContext rootContext, + public ProxyChannelInitializer(IRootWireLoggingContext rootContext, BacksideConnectionPool backsideConnectionPool, Supplier sslEngineSupplier, IConnectionCaptureFactory connectionCaptureFactory, diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java index 131047f13..aad99b72c 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java @@ -14,6 +14,7 @@ import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; +import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.ByteArrayOutputStream; @@ -199,7 +200,8 @@ private static String makeTestRequestViaClient(SimpleHttpClientForTesting client try { var connectionPool = new BacksideConnectionPool(testServerUri, null, 10, Duration.ofSeconds(10)); - nshp.get().start(new RootOtelContext(), connectionPool, 1, null, + var rootCtx = new RootWireLoggingContext(null); + nshp.get().start(rootCtx, connectionPool, 1, null, connectionCaptureFactory, new RequestCapturePredicate()); System.out.println("proxy port = " + port); } catch (InterruptedException e) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index c304ad717..8251da565 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -24,6 +24,7 @@ import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; import java.io.IOException; import java.time.Duration; @@ -93,6 +94,12 @@ public String toString() { } } + static class TestRootContext extends RootOffloaderContext { + public TestRootContext() { + super(null); + } + } + public static InMemoryConnectionCaptureFactory buildSerializerFactory(int bufferSize, Runnable onClosedCallback) { return new InMemoryConnectionCaptureFactory("TEST_NODE_ID", bufferSize, onClosedCallback); } @@ -117,7 +124,7 @@ static ByteBuf makeSequentialByteBuf(int offset, int size) { 
static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, AtomicInteger uniqueIdCounter, List directives) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); - var offloader = connectionFactory.createOffloader(new ConnectionContext(new RootOtelContext(), + var offloader = connectionFactory.createOffloader(new ConnectionContext(new TestRootContext(), "n", "test"), "TEST_"+uniqueIdCounter.incrementAndGet()); for (var directive : directives) { From f199e98112259a76c346fcc341d0defb0d73b0c7 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 9 Jan 2024 23:48:47 -0500 Subject: [PATCH 52/94] In-progress checkpoint (code won't compile). Setting up separate metric instruments for each of the contexts' needs. Signed-off-by: Greg Schohn --- .../tracing/ConnectionContext.java | 10 +- .../tracing/IRootOffloaderContext.java | 3 +- .../tracing/AbstractNestedSpanContext.java | 11 +- .../tracing/DirectNestedSpanContext.java | 6 +- .../tracing/FilteringAttributeBuilder.java | 9 ++ .../tracing/IInstrumentConstructor.java | 4 +- .../tracing/IInstrumentationAttributes.java | 38 ++--- .../migrations/tracing/IRootOtelContext.java | 2 +- .../IScopedInstrumentationAttributes.java | 18 ++- .../tracing/IWithStartTimeAndAttributes.java | 47 +++--- .../tracing/IWithTypedEnclosingScope.java | 2 +- .../tracing/IndirectNestedSpanContext.java | 4 +- .../migrations/tracing/MeteringClosure.java | 31 ++-- .../tracing/MeteringClosureForStartTimes.java | 38 ++--- .../migrations/tracing/RootOtelContext.java | 13 +- .../commoncontexts/IConnectionContext.java | 5 +- .../IHttpTransactionContext.java | 3 +- .../main/docker/otel-collector-config.yaml | 4 +- .../netty/tracing/RootWireLoggingContext.java | 2 - .../tracing/IKafkaConsumerContexts.java | 6 +- .../replay/tracing/IReplayContexts.java | 39 +++-- .../replay/tracing/IRootReplayerContext.java | 62 ++++++++ .../replay/tracing/KafkaConsumerContexts.java | 72 ++++++++-- .../replay/tracing/ReplayContexts.java | 70 ++++++--- .../replay/tracing/RootReplayerContext.java | 135 ++++++++++++++++++ 25 files changed, 470 insertions(+), 164 deletions(-) create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java create mode 100644 TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 9cb3b253a..870e498d4 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -27,15 +27,17 @@ public ConnectionContext(IRootOffloaderContext rootInstrumentationScope, String this.connectionId = connectionId; this.nodeId = nodeId; initializeSpan(); - //rootInstrumentationScope.getActiveConnectionsCounter(). 
- meterDeltaEvent(ACTIVE_CONNECTION, 1, + meterDeltaEvent(rootInstrumentationScope.getActiveConnectionsCounter(), 1, new FilteringAttributeBuilder(Attributes.builder(), KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT)); } @Override public void sendMeterEventsForEnd() { - super.sendMeterEventsForEnd(); - meterDeltaEvent(ACTIVE_CONNECTION, -1, + //super.sendMeterEventsForEnd(); +// meterIncrementEvent(getEndOfScopeMetricName()); +// meterHistogramMicros(getEndOfScopeDurationMetricName()); + + meterDeltaEvent(getRootInstrumentationScope().getActiveConnectionsCounter(), 1, new FilteringAttributeBuilder(Attributes.builder(), KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT)); } } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java index 1b65d6f40..87265c90d 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java @@ -1,9 +1,10 @@ package org.opensearch.migrations.trafficcapture.tracing; +import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; -import io.opentelemetry.api.metrics.Meter; import org.opensearch.migrations.tracing.IRootOtelContext; public interface IRootOffloaderContext extends IRootOtelContext { LongUpDownCounter getActiveConnectionsCounter(); + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java index c340ff5a5..6cad272f2 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AbstractNestedSpanContext.java @@ -8,21 +8,22 @@ import java.time.Instant; -public abstract class AbstractNestedSpanContext - implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, AutoCloseable { +public abstract class AbstractNestedSpanContext + > + implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, AutoCloseable { final T enclosingScope; @Getter final Instant startTime; @Getter private Span currentSpan; - @Getter private final IInstrumentConstructor rootInstrumentationScope; + @Getter private final S rootInstrumentationScope; protected AbstractNestedSpanContext(T enclosingScope) { this.enclosingScope = enclosingScope; this.startTime = Instant.now(); - this.rootInstrumentationScope = enclosingScope.getRootInstrumentationScope(); + this.rootInstrumentationScope = (S) enclosingScope.getRootInstrumentationScope(); } @Override - public IInstrumentationAttributes getEnclosingScope() { + public IInstrumentationAttributes getEnclosingScope() { return enclosingScope; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java index 1894b1b9c..3d5667237 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java +++ 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java @@ -1,8 +1,8 @@ package org.opensearch.migrations.tracing; -public abstract class DirectNestedSpanContext - extends AbstractNestedSpanContext - implements IWithTypedEnclosingScope { +public abstract class DirectNestedSpanContext> + extends AbstractNestedSpanContext + implements IWithTypedEnclosingScope { public DirectNestedSpanContext(T enclosingScope) { super(enclosingScope); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java index 5dd39279b..8206a9e74 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java @@ -14,14 +14,23 @@ */ @Getter public class FilteringAttributeBuilder implements AttributesBuilder { + private AttributesBuilder underlyingBuilder; private final Predicate excludePredicate; + public FilteringAttributeBuilder(Predicate excludePredicate) { + this(Attributes.builder(), excludePredicate); + } + public FilteringAttributeBuilder(AttributesBuilder underlyingBuilder, Predicate excludePredicate) { this.underlyingBuilder = underlyingBuilder; this.excludePredicate = excludePredicate; } + public static FilteringAttributeBuilder getBuilderThatIncludesNone() { + return new FilteringAttributeBuilder(x->true); + } + @Override public Attributes build() { return underlyingBuilder.build(); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index 9f97b2219..728caab2b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -6,6 +6,6 @@ public interface IInstrumentConstructor { Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, AttributesBuilder attributesBuilder); - MeteringClosure buildSimpleMeter(IInstrumentationAttributes context); - MeteringClosureForStartTimes buildMeter(IWithStartTimeAndAttributes context); + MeteringClosure buildMeterClosure(IInstrumentationAttributes context); + MeteringClosureForStartTimes buildMeterClosure(IWithStartTimeAndAttributes context); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index b40af6672..6e41b96e7 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -2,16 +2,17 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; import lombok.NonNull; -import java.time.Duration; import java.util.ArrayList; -public interface IInstrumentationAttributes { 
+public interface IInstrumentationAttributes { String getScopeName(); - IInstrumentationAttributes getEnclosingScope(); - @NonNull IInstrumentConstructor getRootInstrumentationScope(); + IInstrumentationAttributes getEnclosingScope(); + @NonNull S getRootInstrumentationScope(); default Span getCurrentSpan() { return null; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { @@ -24,7 +25,7 @@ default Attributes getPopulatedAttributes(AttributesBuilder builder) { default AttributesBuilder getPopulatedAttributesBuilder(AttributesBuilder builder) { var currentObj = this; - var stack = new ArrayList(); + var stack = new ArrayList>(); while (currentObj != null) { stack.add(currentObj); currentObj = currentObj.getEnclosingScope(); @@ -36,23 +37,24 @@ default AttributesBuilder getPopulatedAttributesBuilder(AttributesBuilder builde return builder; } - default void meterIncrementEvent(String eventName) { - meterIncrementEvent(eventName, Attributes.builder()); + default void meterIncrementEvent(LongCounter c) { + meterIncrementEvent(c, Attributes.builder()); } - default void meterIncrementEvent(String eventName, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildSimpleMeter(this).meterIncrementEvent(eventName, attributesBuilder); + default void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this).meterIncrementEvent(c, attributesBuilder); } - default void meterIncrementEvent(String eventName, long increment) { - meterIncrementEvent (eventName, increment, Attributes.builder()); + default void meterIncrementEvent(LongCounter c, long increment) { + meterIncrementEvent (c, increment, Attributes.builder()); } - default void meterIncrementEvent(String eventName, long increment, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildSimpleMeter(this) - .meterIncrementEvent(eventName, increment, attributesBuilder); + default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this) + .meterIncrementEvent(c, increment, attributesBuilder); } - default void meterDeltaEvent(String eventName, long delta) { - meterDeltaEvent(eventName, delta, Attributes.builder()); + default void meterDeltaEvent(LongUpDownCounter c, long delta) { + meterDeltaEvent(c, delta, Attributes.builder()); } - default void meterDeltaEvent(String eventName, long delta, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildSimpleMeter(this).meterDeltaEvent(eventName, delta, attributesBuilder); + default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this).meterDeltaEvent(c, delta, attributesBuilder); } + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java index 45fecd78c..f715bff6c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java @@ -2,6 +2,6 @@ import io.opentelemetry.api.metrics.Meter; -public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { +public interface IRootOtelContext extends IInstrumentationAttributes, 
IInstrumentConstructor { Meter getMeterForScope(String scopeName); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 135eee504..a7b954a2f 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -1,29 +1,27 @@ package org.opensearch.migrations.tracing; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.trace.Span; import lombok.NonNull; -public interface IScopedInstrumentationAttributes extends IWithStartTimeAndAttributes, AutoCloseable { +public interface IScopedInstrumentationAttributes + extends IWithStartTimeAndAttributes, AutoCloseable { String getActivityName(); @Override @NonNull Span getCurrentSpan(); + LongHistogram getEndOfScopeDurationMetric(); + LongCounter getEndOfScopeCountMetric(); default void endSpan() { getCurrentSpan().end(); } - default String getEndOfScopeMetricName() { - return getActivityName() + "Count"; - } - default String getEndOfScopeDurationMetricName() { - return getActivityName() + "Duration"; - } - default void sendMeterEventsForEnd() { - meterIncrementEvent(getEndOfScopeMetricName()); - meterHistogramMicros(getEndOfScopeDurationMetricName()); + meterIncrementEvent(getEndOfScopeCountMetric()); + meterHistogramMicros(getEndOfScopeDurationMetric()); } default void close() { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 19953f82a..254db4ead 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -2,42 +2,47 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.LongHistogram; import java.time.Duration; import java.time.Instant; -public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { +public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { Instant getStartTime(); - default void meterHistogramMicros(String eventName, Duration value) { - meterHistogramMicros(eventName, value, Attributes.builder()); + default void meterHistogramMillis(LongHistogram histogram) { + meterHistogramMillis(histogram, Attributes.builder()); } - default void meterHistogramMicros(String eventName, Duration value, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, value, attributesBuilder); + default void meterHistogramMillis(LongHistogram histogram, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMillis(histogram, attributesBuilder); } - default void meterHistogramMillis(String eventName, Duration value) { - meterHistogramMillis(eventName, value, Attributes.builder()); + default void meterHistogramMillis(LongHistogram histogram, Duration value) { + meterHistogramMillis(histogram, 
value, Attributes.builder()); } - default void meterHistogramMillis(String eventName, Duration value, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName, value, attributesBuilder); + default void meterHistogramMillis(LongHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMillis(histogram, value, attributesBuilder); } - default void meterHistogram(String eventName, String units, long value) { - meterHistogram(eventName, units, value, Attributes.builder()); + + default void meterHistogramMicros(LongHistogram histogram, Duration value) { + meterHistogramMicros(histogram, value, Attributes.builder()); } - default void meterHistogram(String eventName, String units, long value, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeter(this).meterHistogram(eventName, units, value, attributesBuilder); + default void meterHistogramMicros(LongHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMicros(histogram, value, attributesBuilder); } - default void meterHistogramMicros(String eventName) { - meterHistogramMicros(eventName, Attributes.builder()); + default void meterHistogramMicros(LongHistogram histogram) { + meterHistogramMicros(histogram, Attributes.builder()); } - default void meterHistogramMicros(String eventName, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMicros(eventName, attributesBuilder); + default void meterHistogramMicros(LongHistogram histogram, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this) + .meterHistogramMicros(histogram, attributesBuilder); } - default void meterHistogramMillis(String eventName) { - meterHistogramMillis(eventName, Attributes.builder()); + + default void meterHistogram(LongHistogram histogram, long value) { + meterHistogram(histogram, value, Attributes.builder()); } - default void meterHistogramMillis(String eventName, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeter(this).meterHistogramMillis(eventName, attributesBuilder); + default void meterHistogram(LongHistogram histogram, long value, AttributesBuilder attributesBuilder) { + getRootInstrumentationScope().buildMeterClosure(this).meterHistogram(histogram, value, attributesBuilder); } + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java index 1802e9649..6b4af0f34 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java @@ -1,5 +1,5 @@ package org.opensearch.migrations.tracing; -public interface IWithTypedEnclosingScope extends IInstrumentationAttributes { +public interface IWithTypedEnclosingScope extends IInstrumentationAttributes { T getLogicalEnclosingScope(); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java index 741322e10..eae8f0b4e 100644 --- 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java @@ -3,8 +3,8 @@ import lombok.NonNull; public abstract class IndirectNestedSpanContext - - extends AbstractNestedSpanContext { + , L extends IInstrumentationAttributes> + extends AbstractNestedSpanContext { public IndirectNestedSpanContext(@NonNull D enclosingScope) { super(enclosingScope); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java index 87395fb33..4a3f0748c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; @@ -15,33 +16,29 @@ public class MeteringClosure { public final IInstrumentationAttributes ctx; public final Meter meter; - public void meterIncrementEvent(String eventName, AttributesBuilder attributesBuilder) { - meterIncrementEvent(eventName, 1, attributesBuilder); + public void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { + meterIncrementEvent(c, 1, attributesBuilder); } - public void meterIncrementEvent(String eventName, long increment, AttributesBuilder attributesBuilder) { + public void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { if (ctx == null) { return; } - meterIncrementEvent(eventName, increment, meter.counterBuilder(eventName).build(), attributesBuilder); - } - - public void meterIncrementEvent(String eventName, long increment, LongCounter c, - AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { - c.add(increment, ctx.getPopulatedAttributesBuilder(attributesBuilder) - .put("labelName", eventName) - .build()); + c.add(increment); + // c.add(increment, ctx.getPopulatedAttributesBuilder(attributesBuilder) + // .put("labelName", eventName) + // .build()); } } - public void meterDeltaEvent(String eventName, long delta, AttributesBuilder attributesBuilder) { + public void meterDeltaEvent(LongUpDownCounter c, long delta, + AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { - log.warn("Log with or without trace context?"); - meter.upDownCounterBuilder(eventName) - .build().add(delta, ctx.getPopulatedAttributesBuilder(attributesBuilder) - .put("labelName", eventName) - .build()); + c.add(delta); +// c.add(delta, ctx.getPopulatedAttributesBuilder(attributesBuilder) +// .put("labelName", eventName) +// .build()); } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java index 7bba1b207..4dd32b234 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java +++ 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java @@ -1,15 +1,21 @@ package org.opensearch.migrations.tracing; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.Scope; import java.time.Duration; import java.time.Instant; public class MeteringClosureForStartTimes extends MeteringClosure { + public static LongHistogram makeHistogram(Meter meter, String eventName, String units, long value) { + return meter.histogramBuilder(eventName) + .ofLongs() + .setUnit(units) + .build(); + } + MeteringClosureForStartTimes(IWithStartTimeAndAttributes ctx, Meter meter) { super(ctx, meter); } @@ -18,35 +24,33 @@ public IWithStartTimeAndAttributes getContext() { return (IWithStartTimeAndAttributes) ctx; } - public void meterHistogramMicros(String eventName, Duration between, AttributesBuilder attributesBuilder) { - meterHistogram(eventName, "us", between.toNanos()*1000, attributesBuilder); + public void meterHistogramMicros(LongHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { + meterHistogram(histogram, between.toNanos()*1000, attributesBuilder); } - public void meterHistogramMillis(String eventName, Duration between, AttributesBuilder attributesBuilder) { - meterHistogram(eventName, "ms", between.toMillis(), attributesBuilder); + public void meterHistogramMillis(LongHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { + meterHistogram(histogram, between.toMillis(), attributesBuilder); } - public void meterHistogram(String eventName, String units, long value, AttributesBuilder attributesBuilder) { + public void meterHistogram(LongHistogram h, long value, AttributesBuilder attributesBuilder) { if (ctx == null) { return; } try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { - meter.histogramBuilder(eventName) - .ofLongs() - .setUnit(units) - .build().record(value, ctx.getPopulatedAttributesBuilder(attributesBuilder) - .put("labelName", eventName) - .build()); + h.record(value); +// h.record(value, ctx.getPopulatedAttributesBuilder(attributesBuilder) +// //.put("labelName", eventName) +// .build()); } } - public void meterHistogramMillis(String eventName, AttributesBuilder attributesBuilder) { - meterHistogram(eventName, "ms", Duration.between(getContext().getStartTime(), Instant.now()).toMillis(), + public void meterHistogramMillis(LongHistogram histogram, AttributesBuilder attributesBuilder) { + meterHistogram(histogram, Duration.between(getContext().getStartTime(), Instant.now()).toMillis(), attributesBuilder); } - public void meterHistogramMicros(String eventName, AttributesBuilder attributesBuilder) { - meterHistogram(eventName, "us", + public void meterHistogramMicros(LongHistogram histogram, AttributesBuilder attributesBuilder) { + meterHistogram(histogram, Duration.between(getContext().getStartTime(), Instant.now()).toNanos()*1000, attributesBuilder); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 4d04ba48c..1ef8a7c4b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ 
-14,7 +14,6 @@ import io.opentelemetry.sdk.logs.SdkLoggerProvider; import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.export.AggregationTemporalitySelector; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; @@ -43,7 +42,9 @@ public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String .build(); final var metricReader = PeriodicMetricReader.builder(OtlpGrpcMetricExporter.builder() .setEndpoint(collectorEndpoint) - .setAggregationTemporalitySelector(AggregationTemporalitySelector.deltaPreferred()) + // see https://opentelemetry.io/docs/specs/otel/metrics/sdk_exporters/prometheus/ + // "A Prometheus Exporter MUST only support Cumulative Temporality." + //.setAggregationTemporalitySelector(AggregationTemporalitySelector.deltaPreferred()) .build()) .setInterval(Duration.ofMillis(1000)) .build(); @@ -91,7 +92,7 @@ public String getScopeName() { } @Override - public IInstrumentationAttributes getEnclosingScope() { + public IRootOtelContext getEnclosingScope() { return null; } @@ -101,7 +102,7 @@ OpenTelemetry getOpenTelemetry() { @Override @NonNull - public IInstrumentConstructor getRootInstrumentationScope() { + public IRootOtelContext getRootInstrumentationScope() { return this; } @@ -110,11 +111,11 @@ public Meter getMeterForScope(String scopeName) { return getOpenTelemetry().getMeter(scopeName); } - public MeteringClosure buildSimpleMeter(IInstrumentationAttributes ctx) { + public MeteringClosure buildMeterClosure(IInstrumentationAttributes ctx) { return new MeteringClosure(ctx, getMeterForScope(ctx.getScopeName())); } - public MeteringClosureForStartTimes buildMeter(IWithStartTimeAndAttributes ctx) { + public MeteringClosureForStartTimes buildMeterClosure(IWithStartTimeAndAttributes ctx) { return new MeteringClosureForStartTimes(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 729f8712e..61ef72d3c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -2,10 +2,11 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IConnectionContext extends IScopedInstrumentationAttributes { +public interface IConnectionContext extends IScopedInstrumentationAttributes { static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); static final AttributeKey NODE_ID_ATTR = AttributeKey.stringKey("nodeId"); String CHANNEL_SCOPE = "Channel"; @@ -14,7 +15,7 @@ public interface IConnectionContext extends IScopedInstrumentationAttributes { String getNodeId(); @Override - default IInstrumentationAttributes getEnclosingScope() { return null; } + default IInstrumentationAttributes getEnclosingScope() { return null; } @Override 
default AttributesBuilder fillAttributes(AttributesBuilder builder) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index bebcee137..a153a7ce9 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -2,9 +2,10 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.IRootOtelContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { +public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); long getSourceRequestIndex(); diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml index 0e95469c0..b32005459 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml @@ -12,8 +12,6 @@ processors: exporters: prometheus: endpoint: "0.0.0.0:8889" - const_labels: - label1: value1 logging: loglevel: debug @@ -39,4 +37,4 @@ service: metrics: receivers: [otlp] processors: [batch] - exporters: [prometheus] + exporters: [logging,prometheus] diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java index e18aad136..9c7eac91b 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java @@ -1,10 +1,8 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; import lombok.Getter; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; public class RootWireLoggingContext extends RootOffloaderContext implements IRootWireLoggingContext { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index fdfcfea6e..163368e9c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -24,14 +24,14 @@ class MetricNames { public static final String ACTIVE_PARTITIONS_ASSIGNED_COUNT = "numPartitionsAssigned"; } - interface IAsyncListeningContext 
extends IInstrumentationAttributes { + interface IAsyncListeningContext extends IInstrumentationAttributes { default String getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } } - interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { + interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { @Override default String getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } } - interface ITouchScopeContext extends IKafkaCommitScopeContext { + interface ITouchScopeContext extends IKafkaConsumerScope { @Override default String getActivityName() { return ActivityNames.TOUCH; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 2aa1f33cf..e4fee2e92 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -70,7 +70,7 @@ public static class MetricNames { public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget"; } - public interface IChannelKeyContext extends IConnectionContext { + public interface IChannelKeyContext extends IConnectionContext { @Override default String getActivityName() { return ActivityNames.CHANNEL; } @@ -92,7 +92,9 @@ default String getNodeId() { } public interface IKafkaRecordContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { @Override + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope + { @Override default String getActivityName() { return ActivityNames.RECORD_LIFETIME; } static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); @@ -106,7 +108,8 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } public interface ITrafficStreamsLifecycleContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TRAFFIC_STREAM_LIFETIME; } ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); @@ -120,7 +123,8 @@ default ISourceTrafficChannelKey getChannelKey() { } public interface IReplayerHttpTransactionContext - extends IHttpTransactionContext, IWithTypedEnclosingScope { + extends IHttpTransactionContext, + IWithTypedEnclosingScope { static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); default String getActivityName() { return ActivityNames.HTTP_TRANSACTION; } @@ -153,20 +157,23 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } public interface IRequestAccumulationContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.ACCUMULATING_REQUEST; } default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } } public interface IResponseAccumulationContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.ACCUMULATING_RESPONSE; } default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } } public interface 
IRequestTransformationContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TRANSFORMATION; } default String getScopeName() { return ScopeNames.HTTP_TRANSFORMER_SCOPE; } @@ -192,13 +199,15 @@ public interface IRequestTransformationContext } public interface IScheduledContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.SCHEDULED; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface ITargetRequestContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TARGET_TRANSACTION; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } @@ -207,25 +216,29 @@ public interface ITargetRequestContext } public interface IRequestSendingContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.REQUEST_SENDING; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IWaitingForHttpResponseContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.WAITING_FOR_RESPONSE; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IReceivingHttpResponseContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.RECEIVING_RESPONSE; } default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface ITupleHandlingContext - extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + extends IScopedInstrumentationAttributes, + IWithTypedEnclosingScope { default String getActivityName() { return ActivityNames.TUPLE_HANDLING; } default String getScopeName() { return ScopeNames.TRAFFIC_REPLAYER_SCOPE; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java new file mode 100644 index 000000000..82c4f1560 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java @@ -0,0 +1,62 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import org.opensearch.migrations.tracing.IRootOtelContext; + +public interface IRootReplayerContext extends IRootOtelContext { + LongHistogram getChannelDuration(); + LongHistogram getKafkaRecordDuration(); + LongHistogram getTrafficStreamLifecycleDuration(); + LongHistogram getHttpTransactionDuration(); + LongHistogram getRequestAccumulationDuration(); + LongHistogram getResponseAccumulationDuration(); + LongHistogram 
getRequestTransformationDuration(); + LongHistogram getScheduledDuration(); + LongHistogram getTargetRequestDuration(); + LongHistogram getRequestSendingDuration(); + LongHistogram getWaitingForResponseDuration(); + LongHistogram getReceivingResponseDuration(); + LongHistogram getTupleHandlingDuration(); + + LongHistogram getKafkaTouchDuration(); + LongHistogram getKafkaPollDuration(); + LongHistogram getCommitDuration(); + LongHistogram getKafkaCommitDuration(); + + LongHistogram getReadChunkDuration(); + LongHistogram getBackPressureDuration(); + LongHistogram getWaitForNextSignalDuration(); + + + LongCounter getChannelCounter(); + LongCounter getKafkaRecordCounter(); + LongCounter getTrafficStreamLifecycleCounter(); + LongCounter getHttpTransactionCounter(); + LongCounter getRequestAccumulationCounter(); + LongCounter getResponseAccumulationCounter(); + LongCounter getRequestTransformationCounter(); + LongCounter getScheduledCounter(); + LongCounter getTargetRequestCounter(); + LongCounter getRequestSendingCounter(); + LongCounter getWaitingForResponseCounter(); + LongCounter getReceivingResponseCounter(); + LongCounter getTupleHandlingCounter(); + + + LongCounter getKafkaTouchCounter(); + LongCounter getKafkaPollCounter(); + LongCounter getCommitCounter(); + LongCounter getKafkaCommitCounter(); + + LongCounter getReadChunkCounter(); + LongCounter getBackPressureCounter(); + LongCounter getWaitForNextSignalCounter(); + + LongUpDownCounter getActiveChannelsCounter(); + LongCounter getKafkaRecordBytesCounter(); + LongCounter getKafkaPartitionsRevokedCounter(); + LongCounter getKafkaPartitionsAssignedCounter(); + LongUpDownCounter getKafkaActivePartitionsCounter(); +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 9e6dec74b..e2205c1d6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -1,11 +1,12 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NonNull; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import java.util.Collection; @@ -18,60 +19,103 @@ private KafkaConsumerContexts() {} public static class AsyncListeningContext implements IKafkaConsumerContexts.IAsyncListeningContext { @Getter @NonNull - private final IInstrumentationAttributes enclosingScope; + private final IInstrumentationAttributes enclosingScope; @Override - public @NonNull IInstrumentConstructor getRootInstrumentationScope() { + public @NonNull IRootReplayerContext getRootInstrumentationScope() { return enclosingScope.getRootInstrumentationScope(); } public void onPartitionsRevoked(Collection partitions) { - meterIncrementEvent(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT); + meterIncrementEvent(getRootInstrumentationScope().getKafkaPartitionsRevokedCounter()); onParitionsAssignedChanged(partitions.size()); } public void 
onPartitionsAssigned(Collection partitions) { - meterIncrementEvent(IKafkaConsumerContexts.MetricNames.PARTITIONS_ASSIGNED_EVENT_COUNT); + meterIncrementEvent(getRootInstrumentationScope().getKafkaPartitionsAssignedCounter()); onParitionsAssignedChanged(partitions.size()); } private void onParitionsAssignedChanged(int delta) { - meterDeltaEvent(IKafkaConsumerContexts.MetricNames.ACTIVE_PARTITIONS_ASSIGNED_COUNT, delta); + meterDeltaEvent(getRootInstrumentationScope().getKafkaActivePartitionsCounter(), delta); } } - public static class TouchScopeContext extends DirectNestedSpanContext - implements IKafkaConsumerContexts.ITouchScopeContext + public static class TouchScopeContext + extends DirectNestedSpanContext> + implements IKafkaConsumerContexts.ITouchScopeContext { - public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return getRootInstrumentationScope().getKafkaTouchDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getKafkaTouchCounter(); + } } - public static class PollScopeContext extends DirectNestedSpanContext + public static class PollScopeContext + extends DirectNestedSpanContext> implements IKafkaConsumerContexts.IPollScopeContext { - public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return getRootInstrumentationScope().getKafkaPollDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getKafkaPollCounter(); + } } - public static class CommitScopeContext extends DirectNestedSpanContext + public static class CommitScopeContext + extends DirectNestedSpanContext> implements IKafkaConsumerContexts.ICommitScopeContext { - public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return getRootInstrumentationScope().getCommitDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getCommitCounter(); + } } public static class KafkaCommitScopeContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IKafkaConsumerContexts.IKafkaCommitScopeContext { public KafkaCommitScopeContext(@NonNull KafkaConsumerContexts.CommitScopeContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return getRootInstrumentationScope().getKafkaCommitDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getKafkaCommitCounter(); + } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index c23f11444..1f35d6292 100644 --- 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -1,5 +1,7 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; @@ -16,12 +18,23 @@ public class ReplayContexts { private ReplayContexts() {} - public static class ChannelKeyContext extends AbstractNestedSpanContext + public static class ChannelKeyContext + extends AbstractNestedSpanContext> implements IReplayContexts.IChannelKeyContext { @Getter final ISourceTrafficChannelKey channelKey; + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return getRootInstrumentationScope().getChannelDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getChannelCounter(); + } - public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { + public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { super(enclosingScope); this.channelKey = channelKey; initializeSpan(); @@ -36,15 +49,16 @@ public String toString() { @Override public void onTargetConnectionCreated() { - meterDeltaEvent(IReplayContexts.MetricNames.ACTIVE_TARGET_CONNECTIONS, 1); + meterDeltaEvent(getRootInstrumentationScope().getActiveChannelsCounter(), 1); } @Override public void onTargetConnectionClosed() { - meterDeltaEvent(IReplayContexts.MetricNames.ACTIVE_TARGET_CONNECTIONS, -1); + meterDeltaEvent(getRootInstrumentationScope().getActiveChannelsCounter(), -1); } } - public static class KafkaRecordContext extends DirectNestedSpanContext + public static class KafkaRecordContext + extends DirectNestedSpanContext implements IReplayContexts.IKafkaRecordContext { final String recordId; @@ -54,18 +68,28 @@ public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, Str super(enclosingScope); this.recordId = recordId; initializeSpan(); - this.meterIncrementEvent(IReplayContexts.MetricNames.KAFKA_RECORD_READ); - this.meterIncrementEvent(IReplayContexts.MetricNames.KAFKA_BYTES_READ, recordSize); + this.meterIncrementEvent(getRootInstrumentationScope().getKafkaRecordCounter()); + this.meterIncrementEvent(getRootInstrumentationScope().getKafkaRecordBytesCounter(), recordSize); } @Override public String getRecordId() { return recordId; } + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return getRootInstrumentationScope().getKafkaRecordDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getKafkaRecordCounter(); + } } public static class TrafficStreamsLifecycleContext - extends IndirectNestedSpanContext + extends IndirectNestedSpanContext implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; @@ -91,10 +115,20 @@ public ITrafficStreamKey getTrafficStreamKey() { public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { return getImmediateEnclosingScope().getLogicalEnclosingScope(); } + + @Override + public LongHistogram getEndOfScopeDurationMetric() { + return 
getRootInstrumentationScope().getTrafficStreamLifecycleDuration(); + } + + @Override + public LongCounter getEndOfScopeCountMetric() { + return getRootInstrumentationScope().getTrafficStreamLifecycleCounter(); + } } public static class HttpTransactionContext - extends IndirectNestedSpanContext + extends IndirectNestedSpanContext implements IReplayContexts.IReplayerHttpTransactionContext { final UniqueReplayerRequestKey replayerRequestKey; @Getter final Instant timeOfOriginalRequest; @@ -129,7 +163,7 @@ public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { } public static class RequestAccumulationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestAccumulationContext { public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); @@ -138,7 +172,7 @@ public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContex } public static class ResponseAccumulationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IResponseAccumulationContext { public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); @@ -147,7 +181,7 @@ public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionConte } public static class RequestTransformationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestTransformationContext { public RequestTransformationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); @@ -215,7 +249,7 @@ public void aggregateOutputChunk(int sizeInBytes) { } public static class ScheduledContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IScheduledContext { private final Instant scheduledFor; @@ -236,7 +270,7 @@ public void sendMeterEventsForEnd() { } public static class TargetRequestContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.ITargetRequestContext { public TargetRequestContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); @@ -256,7 +290,7 @@ public void onBytesReceived(int size) { } public static class RequestSendingContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestSendingContext { public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); @@ -265,7 +299,7 @@ public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScop } public static class WaitingForHttpResponseContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IWaitingForHttpResponseContext { public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); @@ -274,7 +308,7 @@ public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclo } public static class ReceivingHttpResponseContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IReceivingHttpResponseContext { public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); @@ -283,7 +317,7 @@ public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclos } public static class TupleHandlingContext - 
extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.ITupleHandlingContext { public TupleHandlingContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java new file mode 100644 index 000000000..86d58d62d --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -0,0 +1,135 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.MeterProvider; +import org.opensearch.migrations.tracing.RootOtelContext; + +import lombok.Getter; + +@Getter +public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { + public final LongHistogram channelDuration; + public final LongHistogram kafkaRecordDuration; + public final LongHistogram trafficStreamLifecycleDuration; + public final LongHistogram httpTransactionDuration; + public final LongHistogram requestAccumulationDuration; + public final LongHistogram responseAccumulationDuration; + public final LongHistogram requestTransformationDuration; + public final LongHistogram scheduledDuration; + public final LongHistogram targetRequestDuration; + public final LongHistogram requestSendingDuration; + public final LongHistogram waitingForResponseDuration; + public final LongHistogram receivingResponseDuration; + public final LongHistogram tupleHandlingDuration; + + public final LongHistogram kafkaTouchDuration; + public final LongHistogram kafkaPollDuration; + public final LongHistogram commitDuration; + public final LongHistogram kafkaCommitDuration; + + public final LongHistogram readChunkDuration; + public final LongHistogram backPressureDuration; + public final LongHistogram waitForNextSignalDuration; + + + public final LongCounter channelCounter; + public final LongCounter kafkaRecordCounter; + public final LongCounter trafficStreamLifecycleCounter; + public final LongCounter httpTransactionCounter; + public final LongCounter requestAccumulationCounter; + public final LongCounter responseAccumulationCounter; + public final LongCounter requestTransformationCounter; + public final LongCounter scheduledCounter; + public final LongCounter targetRequestCounter; + public final LongCounter requestSendingCounter; + public final LongCounter waitingForResponseCounter; + public final LongCounter receivingResponseCounter; + public final LongCounter tupleHandlingCounter; + + public final LongCounter kafkaTouchCounter; + public final LongCounter kafkaPollCounter; + public final LongCounter commitCounter; + public final LongCounter kafkaCommitCounter; + + public final LongCounter readChunkCounter; + public final LongCounter backPressureCounter; + public final LongCounter waitForNextSignalCounter; + + public final LongUpDownCounter activeChannelsCounter; + public final LongCounter kafkaRecordBytesCounter; + public final LongCounter kafkaPartitionsRevokedCounter; + public final LongCounter kafkaPartitionsAssignedCounter; + public final LongUpDownCounter kafkaActivePartitionsCounter; + + public RootReplayerContext(MeterProvider meterProvider) { + channelDuration = 
buildHistogram(meterProvider, ); + kafkaRecordDuration = buildHistogram(meterProvider, );; + trafficStreamLifecycleDuration = buildHistogram(meterProvider, ); + httpTransactionDuration = buildHistogram(meterProvider, ); + requestAccumulationDuration = buildHistogram(meterProvider, ); + responseAccumulationDuration = buildHistogram(meterProvider, ); + requestTransformationDuration = buildHistogram(meterProvider, ); + scheduledDuration = buildHistogram(meterProvider, ); + targetRequestDuration = buildHistogram(meterProvider, ); + requestSendingDuration = buildHistogram(meterProvider, ); + waitingForResponseDuration = buildHistogram(meterProvider, ); + receivingResponseDuration = buildHistogram(meterProvider, ); + tupleHandlingDuration = buildHistogram(meterProvider, ); + + kafkaTouchDuration = buildHistogram(meterProvider, ); + kafkaPollDuration = buildHistogram(meterProvider, ); + commitDuration = buildHistogram(meterProvider, ); + kafkaCommitDuration = buildHistogram(meterProvider, ); + + readChunkDuration = buildHistogram(meterProvider, ); + backPressureDuration = buildHistogram(meterProvider, ); + waitForNextSignalDuration = buildHistogram(meterProvider, ); + + + channelCounter = buildCounter(meterProvider, ); + kafkaRecordCounter = buildCounter(meterProvider, ); + trafficStreamLifecycleCounter = buildCounter(meterProvider, ); + httpTransactionCounter = buildCounter(meterProvider, ); + requestAccumulationCounter = buildCounter(meterProvider, ); + responseAccumulationCounter = buildCounter(meterProvider, ); + requestTransformationCounter = buildCounter(meterProvider, ); + scheduledCounter = buildCounter(meterProvider, ); + targetRequestCounter = buildCounter(meterProvider, ); + requestSendingCounter = buildCounter(meterProvider, ); + waitingForResponseCounter = buildCounter(meterProvider, ); + receivingResponseCounter = buildCounter(meterProvider, ); + tupleHandlingCounter = buildCounter(meterProvider, ); + + kafkaTouchCounter = buildCounter(meterProvider, ); + kafkaPollCounter = buildCounter(meterProvider, ); + commitCounter = buildCounter(meterProvider, ); + kafkaCommitCounter = buildCounter(meterProvider, ); + + readChunkCounter = buildCounter(meterProvider, ); + backPressureCounter = buildCounter(meterProvider, ); + waitForNextSignalCounter = buildCounter(meterProvider, ); + + activeChannelsCounter = buildUpDownCounter(meterProvider, IReplayContexts.MetricNames.KAFKA_RECORD_READ); + kafkaRecordBytesCounter = buildCounter(meterProvider, IReplayContexts.MetricNames.KAFKA_BYTES_READ); + kafkaPartitionsRevokedCounter = + buildCounter(meterProvider, IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT); + kafkaPartitionsAssignedCounter = + buildCounter(meterProvider, IKafkaConsumerContexts.MetricNames.PARTITIONS_ASSIGNED_EVENT_COUNT); + kafkaActivePartitionsCounter = + buildUpDownCounter(meterProvider, IKafkaConsumerContexts.MetricNames.ACTIVE_PARTITIONS_ASSIGNED_COUNT); + } + + private static LongCounter buildCounter(MeterProvider meterProvider, String eventName) { + meterProvider.get(); + } + + private static LongCounter buildHistogram(MeterProvider meterProvider, String eventName) { + + } + + private static LongUpDownCounter buildUpDownCounter(MeterProvider meterProvider, String eventName) { + + } +} From e110540f0e067604c0b119a6c774049fda90cd31 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 10 Jan 2024 11:19:50 -0500 Subject: [PATCH 53/94] Another in-progress checkpoint (still won't compile) where I'm moving metric instruments into the context classes. 
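Roughly, the shape these patches are converging on is: the root context builds one counter and one duration histogram per activity from the OpenTelemetry MeterProvider, and each scoped context bumps the counter and records its elapsed time when it closes. The sketch below illustrates that idea against the public OpenTelemetry metrics API only; the class names (ActivityInstruments, ScopedActivityContext) are invented for the example and are not the classes in these patches.

    import io.opentelemetry.api.metrics.DoubleHistogram;
    import io.opentelemetry.api.metrics.LongCounter;
    import io.opentelemetry.api.metrics.Meter;
    import io.opentelemetry.api.metrics.MeterProvider;

    import java.time.Duration;
    import java.time.Instant;

    // One counter plus one duration histogram per activity, built once from the MeterProvider.
    class ActivityInstruments {
        final LongCounter count;
        final DoubleHistogram durationMs;

        ActivityInstruments(MeterProvider meterProvider, String scopeName, String activityName) {
            Meter meter = meterProvider.get(scopeName);
            count = meter.counterBuilder(activityName + "Count").build();
            durationMs = meter.histogramBuilder(activityName + "Duration").setUnit("ms").build();
        }
    }

    // A scoped context records how often the activity ran and how long it took when it closes.
    class ScopedActivityContext implements AutoCloseable {
        private final ActivityInstruments instruments;
        private final Instant start = Instant.now();

        ScopedActivityContext(ActivityInstruments instruments) {
            this.instruments = instruments;
        }

        @Override
        public void close() {
            instruments.count.add(1);
            instruments.durationMs.record(Duration.between(start, Instant.now()).toMillis());
        }
    }
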
I'm not happy with the scope type propagation through so much of the interface hierarchy, so I'm going to revisit that next. Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 4 +- .../tracing/IRootOffloaderContext.java | 5 +- .../CommonScopedMetricInstruments.java | 45 ++++++ .../tracing/DirectNestedSpanContext.java | 3 +- .../tracing/IInstrumentConstructor.java | 8 +- .../tracing/IInstrumentationAttributes.java | 6 +- .../migrations/tracing/IRootOtelContext.java | 8 +- .../IScopedInstrumentationAttributes.java | 14 +- .../tracing/IWithStartTimeAndAttributes.java | 22 +-- .../migrations/tracing/MeteringClosure.java | 11 +- .../tracing/MeteringClosureForStartTimes.java | 32 ++--- .../migrations/tracing/RootOtelContext.java | 32 +++-- .../commoncontexts/IConnectionContext.java | 4 +- .../IHttpTransactionContext.java | 3 +- .../migrations/tracing/TestContext.java | 4 - .../netty/tracing/HttpMessageContext.java | 8 +- .../tracing/IKafkaConsumerContexts.java | 33 +++-- .../replay/tracing/IReplayContexts.java | 35 ++--- .../replay/tracing/IRootReplayerContext.java | 59 +------- .../tracing/ITrafficSourceContexts.java | 3 +- .../replay/tracing/KafkaConsumerContexts.java | 80 +++++++---- .../replay/tracing/ReplayContexts.java | 11 +- .../replay/tracing/RootReplayerContext.java | 134 ++---------------- .../traffic/source/InputStreamOfTraffic.java | 3 +- .../TestTrafficStreamsLifecycleContext.java | 3 +- 25 files changed, 238 insertions(+), 332 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 288b1917e..dc2d08497 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -9,6 +9,8 @@ public class KafkaRecordContext extends DirectNestedSpanContext implements IScopedInstrumentationAttributes { + public static final String SCOPE_NAME = "KafkaCapture"; + static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); static final AttributeKey RECORD_SIZE_ATTR = AttributeKey.longKey("recordSize"); @@ -28,7 +30,7 @@ public KafkaRecordContext(IConnectionContext enclosingScope, String topic, Strin initializeSpan(); } - @Override public String getScopeName() { return "KafkaCapture"; } + @Override public String getScopeName() { return SCOPE_NAME; } @Override public String getActivityName() { return "stream_flush_called"; } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java index 87265c90d..e6e9cb913 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java @@ -1,10 +1,11 @@ 
package org.opensearch.migrations.trafficcapture.tracing; -import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; -public interface IRootOffloaderContext extends IRootOtelContext { +public interface IRootOffloaderContext extends IInstrumentConstructor { LongUpDownCounter getActiveConnectionsCounter(); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java new file mode 100644 index 000000000..8b1b1a0a5 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java @@ -0,0 +1,45 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.MeterProvider; +import lombok.extern.slf4j.Slf4j; + +import java.util.stream.Collectors; +import java.util.stream.DoubleStream; +import java.util.stream.IntStream; + +@Slf4j +public class CommonScopedMetricInstruments { + public final LongCounter contextCounter; + public final DoubleHistogram contextDuration; + public CommonScopedMetricInstruments(MeterProvider meterProvider, String scopeName, String activityName) { + var meter = meterProvider.get(scopeName); + contextCounter = meter + .counterBuilder(activityName + "Count").build(); + contextDuration = meter + .histogramBuilder(activityName + "Duration") + .setUnit("ms") + .build(); + } + + public CommonScopedMetricInstruments(MeterProvider meterProvider, String scopeName, String activityName, + double firstBucketSize, double lastBucketCeiling) { + var meter = meterProvider.get(scopeName); + contextCounter = meter + .counterBuilder(activityName + "Count").build(); + double[] bucketBoundary = new double[]{firstBucketSize}; + var buckets = DoubleStream.generate(()->{ + var tmp = bucketBoundary[0]; + bucketBoundary[0] *= 2.0; + return tmp; + }).takeWhile(v->v<=lastBucketCeiling).boxed().collect(Collectors.toList()); + log.atInfo().setMessage(()->"Setting buckets for "+scopeName+":"+activityName+" to "+ + buckets.stream().map(x->""+x).collect(Collectors.joining(",","[","]"))).log(); + contextDuration = meter + .histogramBuilder(activityName + "Duration") + .setUnit("ms") + .setExplicitBucketBoundariesAdvice(buckets) + .build(); + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java index 3d5667237..d8845b07d 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.tracing; -public abstract class DirectNestedSpanContext> +public abstract class DirectNestedSpanContext, + T extends IInstrumentationAttributes> extends AbstractNestedSpanContext implements IWithTypedEnclosingScope { public DirectNestedSpanContext(T enclosingScope) { diff --git 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index 728caab2b..e56e70a10 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -3,9 +3,9 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; -public interface IInstrumentConstructor { - Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, +public interface IInstrumentConstructor { + Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, AttributesBuilder attributesBuilder); - MeteringClosure buildMeterClosure(IInstrumentationAttributes context); - MeteringClosureForStartTimes buildMeterClosure(IWithStartTimeAndAttributes context); + MeteringClosure buildSimpleMeterClosure(IInstrumentationAttributes context); + MeteringClosureForStartTimes buildMeterClosure(IWithStartTimeAndAttributes context); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 6e41b96e7..e5da21ce9 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -41,20 +41,20 @@ default void meterIncrementEvent(LongCounter c) { meterIncrementEvent(c, Attributes.builder()); } default void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this).meterIncrementEvent(c, attributesBuilder); + getRootInstrumentationScope().buildSimpleMeterClosure(this).meterIncrementEvent(c, attributesBuilder); } default void meterIncrementEvent(LongCounter c, long increment) { meterIncrementEvent (c, increment, Attributes.builder()); } default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this) + getRootInstrumentationScope().buildSimpleMeterClosure(this) .meterIncrementEvent(c, increment, attributesBuilder); } default void meterDeltaEvent(LongUpDownCounter c, long delta) { meterDeltaEvent(c, delta, Attributes.builder()); } default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this).meterDeltaEvent(c, delta, attributesBuilder); + getRootInstrumentationScope().buildSimpleMeterClosure(this).meterDeltaEvent(c, delta, attributesBuilder); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java index f715bff6c..b9b05c992 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java @@ -1,7 +1,11 @@ package org.opensearch.migrations.tracing; import io.opentelemetry.api.metrics.Meter; +import 
io.opentelemetry.api.metrics.MeterProvider; -public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { - Meter getMeterForScope(String scopeName); +public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { + MeterProvider getMeterProvider(); + default Meter getMeterForScope(String scopeName) { + return getMeterProvider().get(scopeName); + } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index a7b954a2f..8ad2f2627 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -1,20 +1,24 @@ package org.opensearch.migrations.tracing; import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.trace.Span; import lombok.NonNull; -public interface IScopedInstrumentationAttributes +public interface IScopedInstrumentationAttributes> extends IWithStartTimeAndAttributes, AutoCloseable { String getActivityName(); @Override @NonNull Span getCurrentSpan(); - LongHistogram getEndOfScopeDurationMetric(); - LongCounter getEndOfScopeCountMetric(); - + CommonScopedMetricInstruments getMetrics(); + default LongCounter getEndOfScopeCountMetric() { + return getMetrics().contextCounter; + } + default DoubleHistogram getEndOfScopeDurationMetric() { + return getMetrics().contextDuration; + } default void endSpan() { getCurrentSpan().end(); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 254db4ead..2c369bbd3 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -2,7 +2,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.DoubleHistogram; import java.time.Duration; import java.time.Instant; @@ -11,37 +11,37 @@ public interface IWithStartTimeAndAttributes e Instant getStartTime(); - default void meterHistogramMillis(LongHistogram histogram) { + default void meterHistogramMillis(DoubleHistogram histogram) { meterHistogramMillis(histogram, Attributes.builder()); } - default void meterHistogramMillis(LongHistogram histogram, AttributesBuilder attributesBuilder) { + default void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMillis(histogram, attributesBuilder); } - default void meterHistogramMillis(LongHistogram histogram, Duration value) { + default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { meterHistogramMillis(histogram, value, Attributes.builder()); } - default void meterHistogramMillis(LongHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { + default void 
meterHistogramMillis(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMillis(histogram, value, attributesBuilder); } - default void meterHistogramMicros(LongHistogram histogram, Duration value) { + default void meterHistogramMicros(DoubleHistogram histogram, Duration value) { meterHistogramMicros(histogram, value, Attributes.builder()); } - default void meterHistogramMicros(LongHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { + default void meterHistogramMicros(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMicros(histogram, value, attributesBuilder); } - default void meterHistogramMicros(LongHistogram histogram) { + default void meterHistogramMicros(DoubleHistogram histogram) { meterHistogramMicros(histogram, Attributes.builder()); } - default void meterHistogramMicros(LongHistogram histogram, AttributesBuilder attributesBuilder) { + default void meterHistogramMicros(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { getRootInstrumentationScope().buildMeterClosure(this) .meterHistogramMicros(histogram, attributesBuilder); } - default void meterHistogram(LongHistogram histogram, long value) { + default void meterHistogram(DoubleHistogram histogram, long value) { meterHistogram(histogram, value, Attributes.builder()); } - default void meterHistogram(LongHistogram histogram, long value, AttributesBuilder attributesBuilder) { + default void meterHistogram(DoubleHistogram histogram, long value, AttributesBuilder attributesBuilder) { getRootInstrumentationScope().buildMeterClosure(this).meterHistogram(histogram, value, attributesBuilder); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java index 4a3f0748c..dcc0c8dc8 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java @@ -12,9 +12,8 @@ @Slf4j @AllArgsConstructor -public class MeteringClosure { - public final IInstrumentationAttributes ctx; - public final Meter meter; +public class MeteringClosure { + public final IInstrumentationAttributes ctx; public void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { meterIncrementEvent(c, 1, attributesBuilder); @@ -26,9 +25,6 @@ public void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder } try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { c.add(increment); - // c.add(increment, ctx.getPopulatedAttributesBuilder(attributesBuilder) - // .put("labelName", eventName) - // .build()); } } @@ -36,9 +32,6 @@ public void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { c.add(delta); -// c.add(delta, ctx.getPopulatedAttributesBuilder(attributesBuilder) -// .put("labelName", eventName) -// .build()); } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java index 4dd32b234..6d8c85db9 100644 --- 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java @@ -1,55 +1,45 @@ package org.opensearch.migrations.tracing; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.Meter; import java.time.Duration; import java.time.Instant; -public class MeteringClosureForStartTimes extends MeteringClosure { +public class MeteringClosureForStartTimes extends MeteringClosure { - public static LongHistogram makeHistogram(Meter meter, String eventName, String units, long value) { - return meter.histogramBuilder(eventName) - .ofLongs() - .setUnit(units) - .build(); + MeteringClosureForStartTimes(IWithStartTimeAndAttributes ctx) { + super(ctx); } - MeteringClosureForStartTimes(IWithStartTimeAndAttributes ctx, Meter meter) { - super(ctx, meter); + public IWithStartTimeAndAttributes getContext() { + return (IWithStartTimeAndAttributes) ctx; } - public IWithStartTimeAndAttributes getContext() { - return (IWithStartTimeAndAttributes) ctx; - } - - public void meterHistogramMicros(LongHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { + public void meterHistogramMicros(DoubleHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { meterHistogram(histogram, between.toNanos()*1000, attributesBuilder); } - public void meterHistogramMillis(LongHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { + public void meterHistogramMillis(DoubleHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { meterHistogram(histogram, between.toMillis(), attributesBuilder); } - public void meterHistogram(LongHistogram h, long value, AttributesBuilder attributesBuilder) { + public void meterHistogram(DoubleHistogram h, long value, AttributesBuilder attributesBuilder) { if (ctx == null) { return; } try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { h.record(value); -// h.record(value, ctx.getPopulatedAttributesBuilder(attributesBuilder) -// //.put("labelName", eventName) -// .build()); } } - public void meterHistogramMillis(LongHistogram histogram, AttributesBuilder attributesBuilder) { + public void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { meterHistogram(histogram, Duration.between(getContext().getStartTime(), Instant.now()).toMillis(), attributesBuilder); } - public void meterHistogramMicros(LongHistogram histogram, AttributesBuilder attributesBuilder) { + public void meterHistogramMicros(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { meterHistogram(histogram, Duration.between(getContext().getStartTime(), Instant.now()).toNanos()*1000, attributesBuilder); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 1ef8a7c4b..3dd341d3a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -4,6 +4,7 @@ import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; import 
io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.context.Context; @@ -25,7 +26,7 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; -public class RootOtelContext implements IRootOtelContext { +public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @@ -92,7 +93,7 @@ public String getScopeName() { } @Override - public IRootOtelContext getEnclosingScope() { + public RootOtelContext getEnclosingScope() { return null; } @@ -101,22 +102,25 @@ OpenTelemetry getOpenTelemetry() { } @Override - @NonNull - public IRootOtelContext getRootInstrumentationScope() { - return this; + public MeterProvider getMeterProvider() { + return getOpenTelemetry().getMeterProvider(); } @Override - public Meter getMeterForScope(String scopeName) { - return getOpenTelemetry().getMeter(scopeName); - } + @NonNull + public S getRootInstrumentationScope() { + return (S) this; + } // CRTP so that callers can get more specific - public MeteringClosure buildMeterClosure(IInstrumentationAttributes ctx) { - return new MeteringClosure(ctx, getMeterForScope(ctx.getScopeName())); + @Override + public MeteringClosure buildSimpleMeterClosure(IInstrumentationAttributes ctx) { + return new MeteringClosure<>(ctx); } - public MeteringClosureForStartTimes buildMeterClosure(IWithStartTimeAndAttributes ctx) { - return new MeteringClosureForStartTimes(ctx, getOpenTelemetry().getMeter(ctx.getScopeName())); + @Override + public MeteringClosureForStartTimes + buildMeterClosure(IWithStartTimeAndAttributes ctx) { + return new MeteringClosureForStartTimes<>(ctx); } @Override @@ -131,8 +135,8 @@ private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, S } @Override - public Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, - AttributesBuilder attributesBuilder) { + public Span buildSpan(IInstrumentationAttributes enclosingScope, + String scopeName, String spanName, AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 61ef72d3c..a7f9c4c69 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -7,9 +7,10 @@ import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface IConnectionContext extends IScopedInstrumentationAttributes { + String CHANNEL_SCOPE = "Channel"; + String SCOPE_NAME = CHANNEL_SCOPE; static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); static final AttributeKey NODE_ID_ATTR = AttributeKey.stringKey("nodeId"); - String CHANNEL_SCOPE = "Channel"; String getConnectionId(); String getNodeId(); @@ -22,5 +23,4 @@ default AttributesBuilder 
fillAttributes(AttributesBuilder builder) { return builder.put(CONNECTION_ID_ATTR, getConnectionId()) .put(NODE_ID_ATTR, getNodeId()); } - default String getScopeName() { return CHANNEL_SCOPE; } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index a153a7ce9..871e88ed0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -2,10 +2,11 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { +public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); long getSourceRequestIndex(); diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index b36e3d577..e2e4d335d 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -52,10 +52,6 @@ public TestContext(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter var openTel = otelBuilder.build(); rootInstrumentationScope = new RootOtelContext(openTel); } - @Override - public String getScopeName() { - return "TestContext"; - } @Override public IInstrumentationAttributes getEnclosingScope() { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java index 65e13c8d5..45eadadee 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java @@ -10,6 +10,7 @@ public class HttpMessageContext extends DirectNestedSpanContext implements IHttpTransactionContext, IWithStartTimeAndAttributes { + public static final String SCOPE_NAME = "CapturingHttpHandler"; public static final String GATHERING_REQUEST = "gatheringRequest"; public static final String BLOCKED = "blocked"; @@ -26,8 +27,6 @@ public enum HttpTransactionState { @Getter final long sourceRequestIndex; @Getter - final Instant startTime; - @Getter final HttpTransactionState state; static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state) { @@ -49,7 +48,6 @@ static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state public HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestIndex, HttpTransactionState state) { super(enclosingScope); 
this.sourceRequestIndex = sourceRequestIndex; - this.startTime = Instant.now(); this.state = state; initializeSpan(); } @@ -58,6 +56,6 @@ public HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestI public String getActivityName() { return getSpanLabelForState(state); } - - @Override public String getScopeName() { return "CapturingHttpHandler"; } + @Override + public String getScopeName() { return SCOPE_NAME; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index 163368e9c..4d0abf41a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.replay.tracing; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; @@ -19,40 +20,46 @@ private ActivityNames() {} } class MetricNames { + private MetricNames() {} public static final String PARTITIONS_ASSIGNED_EVENT_COUNT = "partitionsAssigned"; public static final String PARTITIONS_REVOKED_EVENT_COUNT = "partitionsRevoked"; public static final String ACTIVE_PARTITIONS_ASSIGNED_COUNT = "numPartitionsAssigned"; } - interface IAsyncListeningContext extends IInstrumentationAttributes { - default String getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } + interface IAsyncListeningContext extends IInstrumentationAttributes { + String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; + @Override default String getScopeName() { return SCOPE_NAME; } } - interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { - @Override - default String getScopeName() { return ScopeNames.KAFKA_CONSUMER_SCOPE; } + interface IKafkaConsumerScope> extends IScopedInstrumentationAttributes { + String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; + @Override default String getScopeName() { return SCOPE_NAME; } } - interface ITouchScopeContext extends IKafkaConsumerScope { + interface ITouchScopeContext> extends IKafkaConsumerScope { + String ACTIVITY_NAME = ActivityNames.TOUCH; @Override - default String getActivityName() { return ActivityNames.TOUCH; } + default String getActivityName() { return ACTIVITY_NAME; } } - interface IPollScopeContext extends IKafkaConsumerScope { + interface IPollScopeContext> extends IKafkaConsumerScope { + String ACTIVITY_NAME = ActivityNames.KAFKA_POLL; @Override - default String getActivityName() { return ActivityNames.KAFKA_POLL; } + default String getActivityName() { return ACTIVITY_NAME; } } /** * Context for the KafkaConsumer's bookkeeping around and including the commit service call */ - interface ICommitScopeContext extends IKafkaConsumerScope { + interface ICommitScopeContext> extends IKafkaConsumerScope { + String ACTIVITY_NAME = ActivityNames.COMMIT; @Override - default String getActivityName() { return ActivityNames.COMMIT; } + default String getActivityName() { return ACTIVITY_NAME; } } /** * Context for ONLY the service call to Kafka to perform the commit. 
*/ - interface IKafkaCommitScopeContext extends IKafkaConsumerScope { + interface IKafkaCommitScopeContext> extends IKafkaConsumerScope{ + String ACTIVITY_NAME = ActivityNames.KAFKA_COMMIT; @Override - default String getActivityName() { return ActivityNames.KAFKA_COMMIT; } + default String getActivityName() { return ACTIVITY_NAME; } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index e4fee2e92..4a8e9c7cd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -71,6 +71,8 @@ public static class MetricNames { } public interface IChannelKeyContext extends IConnectionContext { + String SCOPE_NAME = "Connection"; + @Override default String getActivityName() { return ActivityNames.CHANNEL; } @@ -94,14 +96,16 @@ default String getNodeId() { public interface IKafkaRecordContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope - { @Override + { + String SCOPE_NAME = ScopeNames.KAFKA_RECORD_SCOPE; + + @Override default String getActivityName() { return ActivityNames.RECORD_LIFETIME; } static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); String getRecordId(); - default String getScopeName() { return ScopeNames.KAFKA_RECORD_SCOPE; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { return IScopedInstrumentationAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); } @@ -110,13 +114,13 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { public interface ITrafficStreamsLifecycleContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.TRAFFIC_STREAM_LIFETIME_SCOPE; default String getActivityName() { return ActivityNames.TRAFFIC_STREAM_LIFETIME; } ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); default String getConnectionId() { return getChannelKey().getConnectionId(); } - default String getScopeName() { return ScopeNames.TRAFFIC_STREAM_LIFETIME_SCOPE; } default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -125,7 +129,8 @@ default ISourceTrafficChannelKey getChannelKey() { public interface IReplayerHttpTransactionContext extends IHttpTransactionContext, IWithTypedEnclosingScope { - static final AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); + String SCOPE_NAME = ScopeNames.ACCUMULATOR_SCOPE; + AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); default String getActivityName() { return ActivityNames.HTTP_TRANSACTION; } @@ -133,7 +138,6 @@ public interface IReplayerHttpTransactionContext IChannelKeyContext getChannelKeyContext(); Instant getTimeOfOriginalRequest(); - @Override default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } default String getConnectionId() { return getChannelKey().getConnectionId(); } @@ -159,24 +163,23 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { public interface IRequestAccumulationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.ACCUMULATOR_SCOPE; default String getActivityName() { return 
ActivityNames.ACCUMULATING_REQUEST; } - - default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } } public interface IResponseAccumulationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.ACCUMULATOR_SCOPE; default String getActivityName() { return ActivityNames.ACCUMULATING_RESPONSE; } - default String getScopeName() { return ScopeNames.ACCUMULATOR_SCOPE; } } public interface IRequestTransformationContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { - default String getActivityName() { return ActivityNames.TRANSFORMATION; } - default String getScopeName() { return ScopeNames.HTTP_TRANSFORMER_SCOPE; } + String SCOPE_NAME = ScopeNames.HTTP_TRANSFORMER_SCOPE; + default String getActivityName() { return ActivityNames.TRANSFORMATION; } void onHeaderParse(); void onPayloadParse(); @@ -201,15 +204,15 @@ public interface IRequestTransformationContext public interface IScheduledContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; default String getActivityName() { return ActivityNames.SCHEDULED; } - default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface ITargetRequestContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; default String getActivityName() { return ActivityNames.TARGET_TRANSACTION; } - default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } void onBytesSent(int size); void onBytesReceived(int size); @@ -218,28 +221,28 @@ public interface ITargetRequestContext public interface IRequestSendingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; default String getActivityName() { return ActivityNames.REQUEST_SENDING; } - default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IWaitingForHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; default String getActivityName() { return ActivityNames.WAITING_FOR_RESPONSE; } - default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface IReceivingHttpResponseContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; default String getActivityName() { return ActivityNames.RECEIVING_RESPONSE; } - default String getScopeName() { return ScopeNames.REQUEST_SENDER_SCOPE; } } public interface ITupleHandlingContext extends IScopedInstrumentationAttributes, IWithTypedEnclosingScope { + String SCOPE_NAME = ScopeNames.TRAFFIC_REPLAYER_SCOPE; default String getActivityName() { return ActivityNames.TUPLE_HANDLING; } - default String getScopeName() { return ScopeNames.TRAFFIC_REPLAYER_SCOPE; } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java index 82c4f1560..f2dd17f89 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java @@ -1,62 +1,7 @@ package 
org.opensearch.migrations.replay.tracing; -import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.LongHistogram; -import io.opentelemetry.api.metrics.LongUpDownCounter; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; -public interface IRootReplayerContext extends IRootOtelContext { - LongHistogram getChannelDuration(); - LongHistogram getKafkaRecordDuration(); - LongHistogram getTrafficStreamLifecycleDuration(); - LongHistogram getHttpTransactionDuration(); - LongHistogram getRequestAccumulationDuration(); - LongHistogram getResponseAccumulationDuration(); - LongHistogram getRequestTransformationDuration(); - LongHistogram getScheduledDuration(); - LongHistogram getTargetRequestDuration(); - LongHistogram getRequestSendingDuration(); - LongHistogram getWaitingForResponseDuration(); - LongHistogram getReceivingResponseDuration(); - LongHistogram getTupleHandlingDuration(); - - LongHistogram getKafkaTouchDuration(); - LongHistogram getKafkaPollDuration(); - LongHistogram getCommitDuration(); - LongHistogram getKafkaCommitDuration(); - - LongHistogram getReadChunkDuration(); - LongHistogram getBackPressureDuration(); - LongHistogram getWaitForNextSignalDuration(); - - - LongCounter getChannelCounter(); - LongCounter getKafkaRecordCounter(); - LongCounter getTrafficStreamLifecycleCounter(); - LongCounter getHttpTransactionCounter(); - LongCounter getRequestAccumulationCounter(); - LongCounter getResponseAccumulationCounter(); - LongCounter getRequestTransformationCounter(); - LongCounter getScheduledCounter(); - LongCounter getTargetRequestCounter(); - LongCounter getRequestSendingCounter(); - LongCounter getWaitingForResponseCounter(); - LongCounter getReceivingResponseCounter(); - LongCounter getTupleHandlingCounter(); - - - LongCounter getKafkaTouchCounter(); - LongCounter getKafkaPollCounter(); - LongCounter getCommitCounter(); - LongCounter getKafkaCommitCounter(); - - LongCounter getReadChunkCounter(); - LongCounter getBackPressureCounter(); - LongCounter getWaitForNextSignalCounter(); - - LongUpDownCounter getActiveChannelsCounter(); - LongCounter getKafkaRecordBytesCounter(); - LongCounter getKafkaPartitionsRevokedCounter(); - LongCounter getKafkaPartitionsAssignedCounter(); - LongUpDownCounter getKafkaActivePartitionsCounter(); +public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java index 717b22137..64fff2976 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -16,8 +16,7 @@ private ActivityNames() {} } interface ITrafficSourceContext extends IScopedInstrumentationAttributes { - @Override - default String getScopeName() { return ScopeNames.TRAFFIC_SCOPE; } + String SCOPE_NAME = ScopeNames.TRAFFIC_SCOPE; } interface IReadChunkContext extends ITrafficSourceContext { @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java 
index e2205c1d6..08fb8ab5c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -1,11 +1,15 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.MeterProvider; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NonNull; import org.apache.kafka.common.TopicPartition; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -16,68 +20,88 @@ public class KafkaConsumerContexts { private KafkaConsumerContexts() {} @AllArgsConstructor - public static class AsyncListeningContext implements IKafkaConsumerContexts.IAsyncListeningContext { + public static class AsyncListeningContext + implements IKafkaConsumerContexts.IAsyncListeningContext { + public static class MetricInstruments { + public final LongCounter kafkaPartitionsRevokedCounter; + public final LongCounter kafkaPartitionsAssignedCounter; + public final LongUpDownCounter kafkaActivePartitionsCounter; + public MetricInstruments(MeterProvider meterProvider) { + var meter = meterProvider.get(SCOPE_NAME); + kafkaPartitionsRevokedCounter = meter + .counterBuilder(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT).build(); + kafkaPartitionsAssignedCounter = meter + .counterBuilder(IKafkaConsumerContexts.MetricNames.PARTITIONS_ASSIGNED_EVENT_COUNT).build(); + kafkaActivePartitionsCounter = meter + .upDownCounterBuilder(IKafkaConsumerContexts.MetricNames.ACTIVE_PARTITIONS_ASSIGNED_COUNT).build(); + } + } + @Getter @NonNull - private final IInstrumentationAttributes enclosingScope; + private final IInstrumentationAttributes enclosingScope; @Override - public @NonNull IRootReplayerContext getRootInstrumentationScope() { + public @NonNull RootReplayerContext getRootInstrumentationScope() { return enclosingScope.getRootInstrumentationScope(); } + private @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().asyncListeningInstruments; + } + public void onPartitionsRevoked(Collection partitions) { - meterIncrementEvent(getRootInstrumentationScope().getKafkaPartitionsRevokedCounter()); + meterIncrementEvent(getMetrics().kafkaPartitionsRevokedCounter); onParitionsAssignedChanged(partitions.size()); } public void onPartitionsAssigned(Collection partitions) { - meterIncrementEvent(getRootInstrumentationScope().getKafkaPartitionsAssignedCounter()); + meterIncrementEvent(getMetrics().kafkaPartitionsAssignedCounter); onParitionsAssignedChanged(partitions.size()); } private void onParitionsAssignedChanged(int delta) { - meterDeltaEvent(getRootInstrumentationScope().getKafkaActivePartitionsCounter(), delta); + meterDeltaEvent(getMetrics().kafkaActivePartitionsCounter, delta); } } public static class TouchScopeContext - extends DirectNestedSpanContext> + extends DirectNestedSpanContext> implements IKafkaConsumerContexts.ITouchScopeContext { - public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + public static class MetricInstruments 
extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } + } + public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } - @Override - public LongHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getKafkaTouchDuration(); - } - - @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getKafkaTouchCounter(); + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().touchInstruments; } } public static class PollScopeContext extends DirectNestedSpanContext> implements IKafkaConsumerContexts.IPollScopeContext { + @Override + public CommonScopedMetricInstruments getMetrics() { + return getRootInstrumentationScope().poll; + } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } + } public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } - @Override - public LongHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getKafkaPollDuration(); - } - - @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getKafkaPollCounter(); - } } public static class CommitScopeContext @@ -89,7 +113,7 @@ public CommitScopeContext(@NonNull IInstrumentationAttributes> implements IReplayContexts.IChannelKeyContext { + @Getter final ISourceTrafficChannelKey channelKey; @Override - public LongHistogram getEndOfScopeDurationMetric() { + public DoubleHistogram getEndOfScopeDurationMetric() { return getRootInstrumentationScope().getChannelDuration(); } @@ -45,8 +46,6 @@ public String toString() { return channelKey.toString(); } - @Override public String getScopeName() { return "Connection"; } - @Override public void onTargetConnectionCreated() { meterDeltaEvent(getRootInstrumentationScope().getActiveChannelsCounter(), 1); @@ -78,7 +77,7 @@ public String getRecordId() { } @Override - public LongHistogram getEndOfScopeDurationMetric() { + public DoubleHistogram getEndOfScopeDurationMetric() { return getRootInstrumentationScope().getKafkaRecordDuration(); } @@ -117,7 +116,7 @@ public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { } @Override - public LongHistogram getEndOfScopeDurationMetric() { + public DoubleHistogram getEndOfScopeDurationMetric() { return getRootInstrumentationScope().getTrafficStreamLifecycleDuration(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 86d58d62d..387b84be1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -1,7 +1,8 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; import 
io.opentelemetry.api.metrics.MeterProvider; import org.opensearch.migrations.tracing.RootOtelContext; @@ -9,127 +10,16 @@ import lombok.Getter; @Getter -public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { - public final LongHistogram channelDuration; - public final LongHistogram kafkaRecordDuration; - public final LongHistogram trafficStreamLifecycleDuration; - public final LongHistogram httpTransactionDuration; - public final LongHistogram requestAccumulationDuration; - public final LongHistogram responseAccumulationDuration; - public final LongHistogram requestTransformationDuration; - public final LongHistogram scheduledDuration; - public final LongHistogram targetRequestDuration; - public final LongHistogram requestSendingDuration; - public final LongHistogram waitingForResponseDuration; - public final LongHistogram receivingResponseDuration; - public final LongHistogram tupleHandlingDuration; - - public final LongHistogram kafkaTouchDuration; - public final LongHistogram kafkaPollDuration; - public final LongHistogram commitDuration; - public final LongHistogram kafkaCommitDuration; - - public final LongHistogram readChunkDuration; - public final LongHistogram backPressureDuration; - public final LongHistogram waitForNextSignalDuration; - - - public final LongCounter channelCounter; - public final LongCounter kafkaRecordCounter; - public final LongCounter trafficStreamLifecycleCounter; - public final LongCounter httpTransactionCounter; - public final LongCounter requestAccumulationCounter; - public final LongCounter responseAccumulationCounter; - public final LongCounter requestTransformationCounter; - public final LongCounter scheduledCounter; - public final LongCounter targetRequestCounter; - public final LongCounter requestSendingCounter; - public final LongCounter waitingForResponseCounter; - public final LongCounter receivingResponseCounter; - public final LongCounter tupleHandlingCounter; - - public final LongCounter kafkaTouchCounter; - public final LongCounter kafkaPollCounter; - public final LongCounter commitCounter; - public final LongCounter kafkaCommitCounter; - - public final LongCounter readChunkCounter; - public final LongCounter backPressureCounter; - public final LongCounter waitForNextSignalCounter; - - public final LongUpDownCounter activeChannelsCounter; - public final LongCounter kafkaRecordBytesCounter; - public final LongCounter kafkaPartitionsRevokedCounter; - public final LongCounter kafkaPartitionsAssignedCounter; - public final LongUpDownCounter kafkaActivePartitionsCounter; - - public RootReplayerContext(MeterProvider meterProvider) { - channelDuration = buildHistogram(meterProvider, ); - kafkaRecordDuration = buildHistogram(meterProvider, );; - trafficStreamLifecycleDuration = buildHistogram(meterProvider, ); - httpTransactionDuration = buildHistogram(meterProvider, ); - requestAccumulationDuration = buildHistogram(meterProvider, ); - responseAccumulationDuration = buildHistogram(meterProvider, ); - requestTransformationDuration = buildHistogram(meterProvider, ); - scheduledDuration = buildHistogram(meterProvider, ); - targetRequestDuration = buildHistogram(meterProvider, ); - requestSendingDuration = buildHistogram(meterProvider, ); - waitingForResponseDuration = buildHistogram(meterProvider, ); - receivingResponseDuration = buildHistogram(meterProvider, ); - tupleHandlingDuration = buildHistogram(meterProvider, ); - - kafkaTouchDuration = buildHistogram(meterProvider, ); - kafkaPollDuration = 
buildHistogram(meterProvider, ); - commitDuration = buildHistogram(meterProvider, ); - kafkaCommitDuration = buildHistogram(meterProvider, ); - - readChunkDuration = buildHistogram(meterProvider, ); - backPressureDuration = buildHistogram(meterProvider, ); - waitForNextSignalDuration = buildHistogram(meterProvider, ); - - - channelCounter = buildCounter(meterProvider, ); - kafkaRecordCounter = buildCounter(meterProvider, ); - trafficStreamLifecycleCounter = buildCounter(meterProvider, ); - httpTransactionCounter = buildCounter(meterProvider, ); - requestAccumulationCounter = buildCounter(meterProvider, ); - responseAccumulationCounter = buildCounter(meterProvider, ); - requestTransformationCounter = buildCounter(meterProvider, ); - scheduledCounter = buildCounter(meterProvider, ); - targetRequestCounter = buildCounter(meterProvider, ); - requestSendingCounter = buildCounter(meterProvider, ); - waitingForResponseCounter = buildCounter(meterProvider, ); - receivingResponseCounter = buildCounter(meterProvider, ); - tupleHandlingCounter = buildCounter(meterProvider, ); - - kafkaTouchCounter = buildCounter(meterProvider, ); - kafkaPollCounter = buildCounter(meterProvider, ); - commitCounter = buildCounter(meterProvider, ); - kafkaCommitCounter = buildCounter(meterProvider, ); - - readChunkCounter = buildCounter(meterProvider, ); - backPressureCounter = buildCounter(meterProvider, ); - waitForNextSignalCounter = buildCounter(meterProvider, ); - - activeChannelsCounter = buildUpDownCounter(meterProvider, IReplayContexts.MetricNames.KAFKA_RECORD_READ); - kafkaRecordBytesCounter = buildCounter(meterProvider, IReplayContexts.MetricNames.KAFKA_BYTES_READ); - kafkaPartitionsRevokedCounter = - buildCounter(meterProvider, IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT); - kafkaPartitionsAssignedCounter = - buildCounter(meterProvider, IKafkaConsumerContexts.MetricNames.PARTITIONS_ASSIGNED_EVENT_COUNT); - kafkaActivePartitionsCounter = - buildUpDownCounter(meterProvider, IKafkaConsumerContexts.MetricNames.ACTIVE_PARTITIONS_ASSIGNED_COUNT); - } - - private static LongCounter buildCounter(MeterProvider meterProvider, String eventName) { - meterProvider.get(); - } - - private static LongCounter buildHistogram(MeterProvider meterProvider, String eventName) { - - } - - private static LongUpDownCounter buildUpDownCounter(MeterProvider meterProvider, String eventName) { +public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { + public final KafkaConsumerContexts.AsyncListeningContext.MetricInstruments asyncListeningInstruments; + public final KafkaConsumerContexts.TouchScopeContext.MetricInstruments touchInstruments; + public final KafkaConsumerContexts.PollScopeContext.MetricInstruments pollInstruments; + + public RootReplayerContext(OpenTelemetry sdk) { + super(sdk); + var meterProvider = this.getMeterProvider(); + asyncListeningInstruments = new KafkaConsumerContexts.AsyncListeningContext.MetricInstruments(meterProvider); + touchInstruments = new KafkaConsumerContexts.TouchScopeContext.MetricInstruments(meterProvider); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index ceab43fc7..5c88eda46 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -35,6 +35,8 @@ public InputStreamOfTraffic(IInstrumentationAttributes context, InputStream inpu private static class IOSTrafficStreamContext extends DirectNestedSpanContext implements IReplayContexts.ITrafficStreamsLifecycleContext { + public static final String SCOPE_NAME = TELEMETRY_SCOPE_NAME; + @Getter private final ITrafficStreamKey trafficStreamKey; public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, ITrafficStreamKey tsk) { @@ -50,7 +52,6 @@ public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getImmediateEnclosingScope(); } - @Override public String getScopeName() { return TELEMETRY_SCOPE_NAME; } } /** diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index 024bc098f..0e4b4e207 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -18,8 +18,7 @@ public TestTrafficStreamsLifecycleContext(IInstrumentationAttributes rootContext initializeSpan(); } - @Override - public String getScopeName() { return "testScope"; } + public static final String SCOPE_NAME = "testScope"; @Override public String getActivityName() { return "testTrafficSpan"; } From 156ae72a07b6aa3bc93b5c5ddf991e60c4ad94de Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 11 Jan 2024 00:02:08 -0500 Subject: [PATCH 54/94] Another checkpoint that still doesn't compile, but fewer files (I think) are problematic.
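
The shape these changes are converging on: instead of one flat set of LongHistogram/LongCounter getters on the root context, each activity gets a small MetricInstruments holder (CommonScopedMetricInstruments and its per-context subclasses) that is built once from the MeterProvider and hung off the root context. As a rough sketch only - ExampleScopedInstruments, onScopeClosed, and the metric-name suffixes below are placeholders for illustration, not names from this tree:

    import io.opentelemetry.api.metrics.DoubleHistogram;
    import io.opentelemetry.api.metrics.LongCounter;
    import io.opentelemetry.api.metrics.MeterProvider;

    // Illustrative holder, built once per activity and stashed on the root context.
    class ExampleScopedInstruments {
        final LongCounter contextCounter;      // bumped each time a scope of this activity closes
        final DoubleHistogram contextDuration; // how long the scope was open, in milliseconds

        ExampleScopedInstruments(MeterProvider meterProvider, String scopeName, String activityName) {
            var meter = meterProvider.get(scopeName);
            // the metric names here are placeholders; the real classes choose their own
            contextCounter = meter.counterBuilder(activityName + "Count").build();
            contextDuration = meter.histogramBuilder(activityName + "Duration").setUnit("ms").build();
        }

        // what a context would do when it closes
        void onScopeClosed(java.time.Duration elapsed) {
            contextCounter.add(1);
            contextDuration.record(elapsed.toNanos() / 1_000_000.0);
        }
    }

Contexts then finish by bumping the counter and recording their open duration as fractional milliseconds (value.toNanos()/1_000_000.0), which is what the meterHistogramMillis rework in IWithStartTimeAndAttributes below does once the MeteringClosure classes are deleted.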
Signed-off-by: Greg Schohn --- .../CommonScopedMetricInstruments.java | 4 +- .../tracing/DirectNestedSpanContext.java | 5 +- .../tracing/IInstrumentConstructor.java | 5 +- .../tracing/IInstrumentationAttributes.java | 11 +- .../migrations/tracing/IRootOtelContext.java | 2 +- .../IScopedInstrumentationAttributes.java | 4 +- .../tracing/IWithStartTimeAndAttributes.java | 42 +- .../migrations/tracing/MeteringClosure.java | 37 -- .../tracing/MeteringClosureForStartTimes.java | 46 -- .../migrations/tracing/RootOtelContext.java | 16 +- .../replay/AccumulationCallbacks.java | 12 +- ...edTrafficToHttpTransactionAccumulator.java | 7 +- .../replay/RequestResponsePacketPair.java | 15 +- .../migrations/replay/TrafficReplayer.java | 80 +-- .../NettyPacketToHttpConsumer.java | 20 +- ...dHttpRequestPreliminaryConvertHandler.java | 5 +- .../http/RequestPipelineOrchestrator.java | 19 +- .../datatypes/ISourceTrafficChannelKey.java | 4 +- .../tracing/IKafkaConsumerContexts.java | 10 +- .../replay/tracing/IReplayContexts.java | 234 ++++++--- .../replay/tracing/IRootReplayerContext.java | 4 +- .../tracing/ITrafficSourceContexts.java | 20 +- .../replay/tracing/KafkaConsumerContexts.java | 66 +-- .../replay/tracing/ReplayContexts.java | 465 ++++++++++++------ .../replay/tracing/RootReplayerContext.java | 55 ++- .../replay/tracing/TrafficSourceContexts.java | 51 +- .../traffic/source/BlockingTrafficSource.java | 19 +- .../traffic/source/ITrafficCaptureSource.java | 7 +- .../traffic/source/InputStreamOfTraffic.java | 37 +- ...afficToHttpTransactionAccumulatorTest.java | 8 +- .../replay/TrafficReplayerTest.java | 16 +- 31 files changed, 806 insertions(+), 520 deletions(-) delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java index 8b1b1a0a5..b6d6954ed 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java @@ -11,8 +11,8 @@ @Slf4j public class CommonScopedMetricInstruments { - public final LongCounter contextCounter; - public final DoubleHistogram contextDuration; + final LongCounter contextCounter; + final DoubleHistogram contextDuration; public CommonScopedMetricInstruments(MeterProvider meterProvider, String scopeName, String activityName) { var meter = meterProvider.get(scopeName); contextCounter = meter diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java index d8845b07d..926d2820c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java @@ -1,9 +1,10 @@ package org.opensearch.migrations.tracing; -public abstract class DirectNestedSpanContext, +public abstract class DirectNestedSpanContext> extends AbstractNestedSpanContext - implements IWithTypedEnclosingScope 
{ + implements IWithTypedEnclosingScope +{ public DirectNestedSpanContext(T enclosingScope) { super(enclosingScope); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index e56e70a10..a8ef5dee1 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -3,9 +3,8 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; -public interface IInstrumentConstructor { +public interface IInstrumentConstructor { + Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, AttributesBuilder attributesBuilder); - MeteringClosure buildSimpleMeterClosure(IInstrumentationAttributes context); - MeteringClosureForStartTimes buildMeterClosure(IWithStartTimeAndAttributes context); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index e5da21ce9..7e441faec 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -41,20 +41,23 @@ default void meterIncrementEvent(LongCounter c) { meterIncrementEvent(c, Attributes.builder()); } default void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildSimpleMeterClosure(this).meterIncrementEvent(c, attributesBuilder); + meterIncrementEvent(c, 1, attributesBuilder); } default void meterIncrementEvent(LongCounter c, long increment) { meterIncrementEvent (c, increment, Attributes.builder()); } default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildSimpleMeterClosure(this) - .meterIncrementEvent(c, increment, attributesBuilder); + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + c.add(increment); + } } default void meterDeltaEvent(LongUpDownCounter c, long delta) { meterDeltaEvent(c, delta, Attributes.builder()); } default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildSimpleMeterClosure(this).meterDeltaEvent(c, delta, attributesBuilder); + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + c.add(delta); + } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java index b9b05c992..e30a1665f 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java @@ -3,7 +3,7 @@ import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; -public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { +public interface IRootOtelContext extends IInstrumentationAttributes, 
IInstrumentConstructor { MeterProvider getMeterProvider(); default Meter getMeterForScope(String scopeName) { return getMeterProvider().get(scopeName); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 8ad2f2627..7fba4c9cb 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -5,7 +5,7 @@ import io.opentelemetry.api.trace.Span; import lombok.NonNull; -public interface IScopedInstrumentationAttributes> +public interface IScopedInstrumentationAttributes extends IWithStartTimeAndAttributes, AutoCloseable { String getActivityName(); @@ -25,7 +25,7 @@ default void endSpan() { default void sendMeterEventsForEnd() { meterIncrementEvent(getEndOfScopeCountMetric()); - meterHistogramMicros(getEndOfScopeDurationMetric()); + meterHistogramMillis(getEndOfScopeDurationMetric()); } default void close() { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 2c369bbd3..582967a3b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -10,39 +10,27 @@ public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { Instant getStartTime(); - default void meterHistogramMillis(DoubleHistogram histogram) { - meterHistogramMillis(histogram, Attributes.builder()); - } - default void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMillis(histogram, attributesBuilder); + meterHistogramMillis(histogram, Duration.between(getStartTime(), Instant.now())); } default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { - meterHistogramMillis(histogram, value, Attributes.builder()); - } - default void meterHistogramMillis(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMillis(histogram, value, attributesBuilder); - } - - default void meterHistogramMicros(DoubleHistogram histogram, Duration value) { - meterHistogramMicros(histogram, value, Attributes.builder()); - } - default void meterHistogramMicros(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this).meterHistogramMicros(histogram, value, attributesBuilder); + meterHistogramMillis(histogram, value, null); } - default void meterHistogramMicros(DoubleHistogram histogram) { - meterHistogramMicros(histogram, Attributes.builder()); + default void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { + meterHistogramMillis(histogram, Duration.between(getStartTime(), Instant.now()), + attributesBuilder); } - default void meterHistogramMicros(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { - 
getRootInstrumentationScope().buildMeterClosure(this) - .meterHistogramMicros(histogram, attributesBuilder); + default void meterHistogramMillis(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { + meterHistogram(histogram, value.toNanos()/1_000_000.0, attributesBuilder); } - - default void meterHistogram(DoubleHistogram histogram, long value) { - meterHistogram(histogram, value, Attributes.builder()); + default void meterHistogram(DoubleHistogram histogram, double value) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + histogram.record(value); + } } - default void meterHistogram(DoubleHistogram histogram, long value, AttributesBuilder attributesBuilder) { - getRootInstrumentationScope().buildMeterClosure(this).meterHistogram(histogram, value, attributesBuilder); + default void meterHistogram(DoubleHistogram histogram, double value, AttributesBuilder attributesBuilder) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + histogram.record(value); + } } - } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java deleted file mode 100644 index dcc0c8dc8..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosure.java +++ /dev/null @@ -1,37 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.LongUpDownCounter; -import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.context.Context; -import io.opentelemetry.context.Scope; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -@AllArgsConstructor -public class MeteringClosure { - public final IInstrumentationAttributes ctx; - - public void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { - meterIncrementEvent(c, 1, attributesBuilder); - } - - public void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { - if (ctx == null) { - return; - } - try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { - c.add(increment); - } - } - - public void meterDeltaEvent(LongUpDownCounter c, long delta, - AttributesBuilder attributesBuilder) { - try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { - c.add(delta); - } - } -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java deleted file mode 100644 index 6d8c85db9..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/MeteringClosureForStartTimes.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.DoubleHistogram; -import io.opentelemetry.api.metrics.Meter; - -import java.time.Duration; -import java.time.Instant; - -public class MeteringClosureForStartTimes extends MeteringClosure { - - MeteringClosureForStartTimes(IWithStartTimeAndAttributes ctx) { - super(ctx); - } - - public IWithStartTimeAndAttributes getContext() { - return (IWithStartTimeAndAttributes) ctx; - } - - public void meterHistogramMicros(DoubleHistogram histogram, 
Duration between, AttributesBuilder attributesBuilder) { - meterHistogram(histogram, between.toNanos()*1000, attributesBuilder); - } - - public void meterHistogramMillis(DoubleHistogram histogram, Duration between, AttributesBuilder attributesBuilder) { - meterHistogram(histogram, between.toMillis(), attributesBuilder); - } - - public void meterHistogram(DoubleHistogram h, long value, AttributesBuilder attributesBuilder) { - if (ctx == null) { - return; - } - try (var scope = new NullableExemplarScope(ctx.getCurrentSpan())) { - h.record(value); - } - } - - public void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { - meterHistogram(histogram, Duration.between(getContext().getStartTime(), Instant.now()).toMillis(), - attributesBuilder); - } - - public void meterHistogramMicros(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { - meterHistogram(histogram, - Duration.between(getContext().getStartTime(), Instant.now()).toNanos()*1000, attributesBuilder); - } -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 3dd341d3a..a15c61b8b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -112,17 +112,6 @@ public S getRootInstrumentationScope() { return (S) this; } // CRTP so that callers can get more specific - @Override - public MeteringClosure buildSimpleMeterClosure(IInstrumentationAttributes ctx) { - return new MeteringClosure<>(ctx); - } - - @Override - public MeteringClosureForStartTimes - buildMeterClosure(IWithStartTimeAndAttributes ctx) { - return new MeteringClosureForStartTimes<>(ctx); - } - @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; // nothing more to do @@ -135,8 +124,9 @@ private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, S } @Override - public Span buildSpan(IInstrumentationAttributes enclosingScope, - String scopeName, String spanName, AttributesBuilder attributesBuilder) { + public + Span buildSpan(IInstrumentationAttributes enclosingScope, + String scopeName, String spanName, AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index 998a09d78..0b9813975 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -5,24 +5,26 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import java.time.Instant; import java.util.List; public interface AccumulationCallbacks { void onRequestReceived(@NonNull 
UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request); void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrpp); void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); - void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IReplayContexts.IChannelKeyContext ctx); + void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, + IReplayContexts.IChannelKeyContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 09c29baa6..2cb411ed1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -9,9 +9,12 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -102,7 +105,7 @@ public void onExpireAccumulation(String partitionId, Accumulation accumulation) @AllArgsConstructor private static class SpanWrappingAccumulationCallbacks { private final AccumulationCallbacks underlying; - public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, + public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { requestCtx.close(); underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), @@ -126,7 +129,7 @@ public void onConnectionClose(@NonNull Accumulation accum, } public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.ITrafficStreamsLifecycleContext tsCtx, + IReplayContexts.ITrafficStreamsLifecycleContext tsCtx, @NonNull List trafficStreamKeysBeingHeld) { underlying.onTrafficStreamsExpired(status, tsCtx.getLogicalEnclosingScope(), 
trafficStreamKeysBeingHeld); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 1866eae3b..4a44f19be 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -6,6 +6,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; @@ -33,7 +34,7 @@ public enum ReconstructionStatus { ReconstructionStatus completionStatus; // switch between RequestAccumulation/ResponseAccumulation objects when we're parsing, // or just leave this null, in which case, the context from the trafficStreamKey should be used - private IScopedInstrumentationAttributes requestOrResponseAccumulationContext; + private IScopedInstrumentationAttributes requestOrResponseAccumulationContext; public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, Instant sourceTimestamp, int startingSourceRequestIndex, int indexOfCurrentRequest) { @@ -50,7 +51,7 @@ public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStr return firstTrafficStreamKeyForRequest; } - public IReplayContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { + public IReplayContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { var looseCtx = requestOrResponseAccumulationContext; // the req/response ctx types in the assert below will always implement this with the // IReplayerHttpTransactionContext parameter, but this seems clearer @@ -58,17 +59,17 @@ public IReplayContexts.IReplayerHttpTransactionContext getHttpTransactionContext assert looseCtx instanceof IWithTypedEnclosingScope; assert looseCtx instanceof IReplayContexts.IRequestAccumulationContext || looseCtx instanceof IReplayContexts.IResponseAccumulationContext; - return ((IWithTypedEnclosingScope) looseCtx) + return ((IWithTypedEnclosingScope>) looseCtx) .getLogicalEnclosingScope(); } - public @NonNull IReplayContexts.IRequestAccumulationContext getRequestContext() { - return (IReplayContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; + public @NonNull IReplayContexts.IRequestAccumulationContext getRequestContext() { + return (IReplayContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; } - public @NonNull IReplayContexts.IResponseAccumulationContext getResponseContext() { - return (IReplayContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; + public @NonNull IReplayContexts.IResponseAccumulationContext getResponseContext() { + return (IReplayContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; } public void rotateRequestGatheringToResponse() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 9c0dff1cb..afed17e32 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -14,8 +14,9 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; @@ -88,21 +89,21 @@ public class TrafficReplayer { private final TrafficStreamLimiter liveTrafficStreamLimiter; private final AtomicInteger successfulRequestCount; private final AtomicInteger exceptionRequestCount; - private final IInstrumentationAttributes topLevelContext; - private ConcurrentHashMap> requestFutureMap; - private ConcurrentHashMap> requestToFinalWorkFuturesMap; - private AtomicBoolean stopReadingRef; - private AtomicReference> allRemainingWorkFutureOrShutdownSignalRef; - private AtomicReference shutdownReasonRef; - private AtomicReference> shutdownFutureRef; - private AtomicReference>> nextChunkFutureRef; - private ConcurrentHashMap liveRequests = new ConcurrentHashMap<>(); + private final AtomicBoolean stopReadingRef; + private final AtomicReference> allRemainingWorkFutureOrShutdownSignalRef; + private final AtomicReference shutdownReasonRef; + private final AtomicReference> shutdownFutureRef; + private final AtomicReference>> nextChunkFutureRef; + private final ConcurrentHashMap liveRequests = new ConcurrentHashMap<>(); private Future nettyShutdownFuture; - public class DualException extends Exception { + public static class DualException extends Exception { public final Throwable originalCause; public final Throwable immediateCause; public DualException(Throwable originalCause, Throwable immediateCause) { @@ -117,13 +118,13 @@ public DualException(String message, Throwable originalCause, Throwable immediat } } - public class TerminationException extends DualException { + public static class TerminationException extends DualException { public TerminationException(Throwable originalCause, Throwable immediateCause) { super(originalCause, immediateCause); } } - public TrafficReplayer(IInstrumentationAttributes context, + public TrafficReplayer(IRootReplayerContext context, URI serverUri, String fullTransformerConfig, IAuthTransformerFactory authTransformerFactory, @@ -134,7 +135,7 @@ public TrafficReplayer(IInstrumentationAttributes context, } - public TrafficReplayer(IInstrumentationAttributes context, + public TrafficReplayer(IRootReplayerContext context, URI serverUri, String fullTransformerConfig, IAuthTransformerFactory authTransformerFactory, @@ -149,7 +150,7 @@ public TrafficReplayer(IInstrumentationAttributes context, ); } - public TrafficReplayer(IInstrumentationAttributes context, + public TrafficReplayer(IRootReplayerContext context, URI serverUri, IAuthTransformerFactory authTransformer, boolean allowInsecureConnections, @@ -390,15 +391,15 @@ public static void main(String[] args) System.exit(3); return; } - var 
topContext = new RootOtelContext(params.otelCollectorEndpoint, "replay"); + var topContext = new RootReplayerContext(RootOtelContext.initializeOpenTelemetry(params.otelCollectorEndpoint, + "replay")); try (var blockingTrafficSource = TrafficCaptureSourceFactory.createTrafficCaptureSource(topContext, params, Duration.ofSeconds(params.lookaheadTimeSeconds)); var authTransformer = buildAuthTransformerFactory(params)) { String transformerConfig = getTransformerConfig(params); - if (transformerConfig != null) - { - log.info("Transformations config string: ", transformerConfig); + if (transformerConfig != null) { + log.atInfo().setMessage(()->"Transformations config string: " + transformerConfig).log(); } var tr = new TrafficReplayer(topContext, uri, transformerConfig, authTransformer, params.userAgent, params.allowInsecureConnections, params.numClientThreads, params.maxConcurrentRequests); @@ -441,11 +442,11 @@ private static void setupShutdownHookForReplayer(TrafficReplayer tr) { * aspect out from the core logic below. */ private static String formatAuthArgFlagsAsString() { - return List.of(REMOVE_AUTH_HEADER_VALUE_ARG, - AUTH_HEADER_VALUE_ARG, - AWS_AUTH_HEADER_USER_AND_SECRET_ARG, - SIGV_4_AUTH_HEADER_SERVICE_REGION_ARG).stream() - .collect(Collectors.joining(", ")); + return String.join(", ", + REMOVE_AUTH_HEADER_VALUE_ARG, + AUTH_HEADER_VALUE_ARG, + AWS_AUTH_HEADER_USER_AND_SECRET_ARG, + SIGV_4_AUTH_HEADER_SERVICE_REGION_ARG); } private static IAuthTransformerFactory buildAuthTransformerFactory(Parameters params) { @@ -488,7 +489,7 @@ private static IAuthTransformerFactory buildAuthTransformerFactory(Parameters pa String region = serviceAndRegion[1]; return new IAuthTransformerFactory() { - DefaultCredentialsProvider defaultCredentialsProvider = DefaultCredentialsProvider.create(); + final DefaultCredentialsProvider defaultCredentialsProvider = DefaultCredentialsProvider.create(); @Override public IAuthTransformer getAuthTransformer(IHttpMessage httpMessage) { return new SigV4Signer(defaultCredentialsProvider, serviceName, region, "https", null); @@ -528,10 +529,8 @@ void setupRunAndWaitForReplay(Duration observedPacketConnectionTimeout, } finally { trafficToHttpTransactionAccumulator.close(); wrapUpWorkAndEmitSummary(replayEngine, trafficToHttpTransactionAccumulator); - if (shutdownFutureRef.get() == null) { - assert requestToFinalWorkFuturesMap.isEmpty() : - "expected to wait for all the in flight requests to fully flush and self destruct themselves"; - } + assert shutdownFutureRef.get() != null || requestToFinalWorkFuturesMap.isEmpty() : + "expected to wait for all the in flight requests to fully flush and self destruct themselves"; } } @@ -553,8 +552,7 @@ private void wrapUpWorkAndEmitSummary(ReplayEngine replayEngine, CapturedTraffic logLevel = secondaryLogLevel; } } - if (requestToFinalWorkFuturesMap.size() > 0 || - exceptionRequestCount.get() > 0) { + if (!requestToFinalWorkFuturesMap.isEmpty() || exceptionRequestCount.get() > 0) { log.atWarn().setMessage("{} in-flight requests being dropped due to pending shutdown; " + "{} requests to the target threw an exception; " + "{} requests were successfully processed.") @@ -606,7 +604,7 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { @Override public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { 
replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); @@ -625,7 +623,7 @@ public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, @Override public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrPair) { log.atInfo().setMessage(()->"Done receiving captured stream for " + requestKey + ":" + rrPair.requestData).log(); @@ -641,7 +639,7 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, } } - Void handleCompletedTransaction(IInstrumentationAttributes context, + Void handleCompletedTransaction(IInstrumentationAttributes context, @NonNull UniqueReplayerRequestKey requestKey, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { @@ -651,7 +649,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, // packaging it up and calling the callback. // Escalate it up out handling stack and shutdown. if (t == null || t instanceof Exception) { - try (var tupleHandlingContext = new ReplayContexts.TupleHandlingContext(httpContext)) { + try (var tupleHandlingContext = httpContext.createTupleContext()) { packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); } commitTrafficStreams(context, rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); @@ -688,13 +686,13 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { commitTrafficStreams(ctx, trafficStreamKeysBeingHeld, status); } @SneakyThrows - private void commitTrafficStreams(IInstrumentationAttributes context, + private void commitTrafficStreams(IInstrumentationAttributes context, List trafficStreamKeysBeingHeld, RequestResponsePacketPair.ReconstructionStatus status) { commitTrafficStreams(context, trafficStreamKeysBeingHeld, @@ -702,7 +700,7 @@ private void commitTrafficStreams(IInstrumentationAttributes context, } @SneakyThrows - private void commitTrafficStreams(IInstrumentationAttributes context, + private void commitTrafficStreams(IInstrumentationAttributes context, List trafficStreamKeysBeingHeld, boolean shouldCommit) { if (shouldCommit && trafficStreamKeysBeingHeld != null) { for (var tsk : trafficStreamKeysBeingHeld) { @@ -713,8 +711,10 @@ private void commitTrafficStreams(IInstrumentationAttributes context, } @Override - public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, int channelInteractionNum, - IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, + int channelInteractionNum, + IReplayContexts.IChannelKeyContext ctx, + RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant timestamp, @NonNull List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 
a7cc705da..cbf728ba2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -26,6 +26,7 @@ import org.opensearch.migrations.replay.datahandlers.http.helpers.WriteMeteringHandler; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; @@ -58,7 +59,7 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - IWithTypedEnclosingScope currentRequestContextUnion; + IWithTypedEnclosingScope> currentRequestContextUnion; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, ReplayContexts.HttpTransactionContext httpTransactionContext) { @@ -66,7 +67,8 @@ public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri httpTransactionContext.getLogicalEnclosingScope()), httpTransactionContext); } - public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IReplayContexts.IReplayerHttpTransactionContext ctx) { + public NettyPacketToHttpConsumer(ChannelFuture clientConnection, + IReplayContexts.IReplayerHttpTransactionContext ctx) { var parentContext = new ReplayContexts.TargetRequestContext(ctx); this.setCurrentRequestContext(new ReplayContexts.RequestSendingContext(parentContext)); responseBuilder = AggregatedRawResponse.builder(Instant.now()); @@ -91,24 +93,24 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IReplayContexts }); } - private & - IScopedInstrumentationAttributes> + private > & + IScopedInstrumentationAttributes> void setCurrentRequestContext(T requestSendingContext) { currentRequestContextUnion = requestSendingContext; } - private IScopedInstrumentationAttributes getCurrentRequestSpan() { - return (IScopedInstrumentationAttributes) currentRequestContextUnion; + private IScopedInstrumentationAttributes getCurrentRequestSpan() { + return (IScopedInstrumentationAttributes) currentRequestContextUnion; } - public IReplayContexts.ITargetRequestContext getParentContext() { + public IReplayContexts.ITargetRequestContext getParentContext() { return currentRequestContextUnion.getLogicalEnclosingScope(); } public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, SslContext sslContext, URI serverUri, - IReplayContexts.IChannelKeyContext channelKeyContext) { + IReplayContexts.IChannelKeyContext channelKeyContext) { String host = serverUri.getHost(); int port = serverUri.getPort(); log.atTrace().setMessage(()->"Active - setting up backend connection to " + host + ":" + port).log(); @@ -238,7 +240,7 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa return activeChannelFuture; } - private IReplayContexts.IReplayerHttpTransactionContext httpContext() { + private IReplayContexts.IReplayerHttpTransactionContext httpContext() { return getParentContext().getLogicalEnclosingScope(); } diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 2b51a7b5c..3a8e7e845 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -12,6 +12,7 @@ import org.opensearch.migrations.replay.datahandlers.PayloadAccessFaultingMap; import org.opensearch.migrations.replay.datahandlers.PayloadNotLoadedException; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IJsonTransformer; @@ -28,13 +29,13 @@ public class NettyDecodedHttpRequestPreliminaryConvertHandler extends Channel final IJsonTransformer transformer; final List> chunkSizes; final String diagnosticLabel; - private IReplayContexts.IRequestTransformationContext httpTransactionContext; + private final IReplayContexts.IRequestTransformationContext httpTransactionContext; static final MetricsLogger metricsLogger = new MetricsLogger("NettyDecodedHttpRequestPreliminaryConvertHandler"); public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, - IReplayContexts.IRequestTransformationContext httpTransactionContext) { + IReplayContexts.IRequestTransformationContext httpTransactionContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 589dec757..131608c70 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -13,6 +13,7 @@ import org.opensearch.migrations.replay.datahandlers.http.helpers.LastHttpContentListener; import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -44,14 +45,14 @@ public class RequestPipelineOrchestrator { public static final String HTTP_REQUEST_DECODER_NAME = "HTTP_REQUEST_DECODER"; private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; - private IReplayContexts.IRequestTransformationContext httpTransactionContext; + private final IReplayContexts.IRequestTransformationContext httpTransactionContext; @Getter final IAuthTransformerFactory authTransfomerFactory; public 
RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, IAuthTransformerFactory incomingAuthTransformerFactory, - IReplayContexts.IRequestTransformationContext httpTransactionContext) { + IReplayContexts.IRequestTransformationContext httpTransactionContext) { this.chunkSizes = chunkSizes; this.packetReceiver = packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : @@ -87,7 +88,7 @@ void addJsonParsingHandlers(ChannelHandlerContext ctx, void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) { pipeline.addFirst(HTTP_REQUEST_DECODER_NAME, new HttpRequestDecoder()); addLoggingHandler(pipeline, "A"); - pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.aggregateInputChunk(size))); + pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::aggregateInputChunk)); // IN: Netty HttpRequest(1) + HttpContent(1) blocks (which may be compressed) + EndOfInput + ByteBuf // OUT: ByteBufs(1) OR Netty HttpRequest(1) + HttpJsonMessage(1) with only headers PLUS + HttpContent(1) blocks // Note1: original Netty headers are preserved so that HttpContentDecompressor can work appropriately. @@ -110,11 +111,11 @@ void addContentParsingHandlers(ChannelHandlerContext ctx, httpTransactionContext.onPayloadParse(); log.debug("Adding content parsing handlers to pipeline"); var pipeline = ctx.pipeline(); - pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onPayloadBytesIn(size))); + pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onPayloadBytesIn)); // IN: Netty HttpRequest(1) + HttpJsonMessage(1) with headers + HttpContent(1) blocks (which may be compressed) // OUT: Netty HttpRequest(2) + HttpJsonMessage(1) with headers + HttpContent(2) uncompressed blocks pipeline.addLast(new HttpContentDecompressor()); - pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onUncompressedBytesIn(size))); + pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onUncompressedBytesIn)); if (transformer != null) { httpTransactionContext.onJsonPayloadParseRequired(); log.debug("Adding JSON handlers to pipeline"); @@ -134,12 +135,12 @@ void addContentParsingHandlers(ChannelHandlerContext ctx, pipeline.addLast(new NettyJsonContentAuthSigner(authTransfomer)); addLoggingHandler(pipeline, "G"); } - pipeline.addLast(new LastHttpContentListener(()->httpTransactionContext.onPayloadParseSuccess())); - pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onUncompressedBytesOut(size))); + pipeline.addLast(new LastHttpContentListener(httpTransactionContext::onPayloadParseSuccess)); + pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onUncompressedBytesOut)); // IN: Netty HttpRequest(2) + HttpJsonMessage(3) with headers only + HttpContent(3) blocks // OUT: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + HttpContent(4) blocks pipeline.addLast(new NettyJsonContentCompressor()); - pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.onFinalBytesOut(size))); + pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onFinalBytesOut)); addLoggingHandler(pipeline, "H"); // IN: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + HttpContent(4) blocks + EndOfInput // OUT: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + ByteBufs(2) @@ -153,7 +154,7 @@ void addBaselineHandlers(ChannelPipeline pipeline) { // IN: ByteBufs(2) + 
HttpJsonMessage(4) with headers only + HttpContent(1) (if the repackaging handlers were skipped) // OUT: ByteBufs(3) which are sized similarly to how they were received pipeline.addLast(new NettyJsonToByteBufHandler(Collections.unmodifiableList(chunkSizes))); - pipeline.addLast(new ReadMeteringingHandler(size->httpTransactionContext.aggregateOutputChunk(size))); + pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::aggregateOutputChunk)); // IN: ByteBufs(3) // OUT: nothing - terminal! ByteBufs are routed to the packet handler! addLoggingHandler(pipeline, "K"); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java index 17feb6fe3..1a068d108 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java @@ -2,9 +2,11 @@ import lombok.NonNull; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; +import org.opensearch.migrations.tracing.IInstrumentConstructor; public interface ISourceTrafficChannelKey { String getNodeId(); String getConnectionId(); - @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); + @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index 4d0abf41a..f5632a626 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -30,16 +30,16 @@ interface IAsyncListeningContext extends IInstru String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; @Override default String getScopeName() { return SCOPE_NAME; } } - interface IKafkaConsumerScope> extends IScopedInstrumentationAttributes { + interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; @Override default String getScopeName() { return SCOPE_NAME; } } - interface ITouchScopeContext> extends IKafkaConsumerScope { + interface ITouchScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.TOUCH; @Override default String getActivityName() { return ACTIVITY_NAME; } } - interface IPollScopeContext> extends IKafkaConsumerScope { + interface IPollScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.KAFKA_POLL; @Override default String getActivityName() { return ACTIVITY_NAME; } @@ -48,7 +48,7 @@ interface IPollScopeContext> extends IKafkaC /** * Context for the KafkaConsumer's bookkeeping around and including the commit service call */ - interface ICommitScopeContext> extends IKafkaConsumerScope { + interface ICommitScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.COMMIT; @Override default String getActivityName() { return ACTIVITY_NAME; } @@ -57,7 +57,7 @@ interface ICommitScopeContext> extends IKafk /** * Context for 
ONLY the service call to Kafka to perform the commit. */ - interface IKafkaCommitScopeContext> extends IKafkaConsumerScope{ + interface IKafkaCommitScopeContext extends IKafkaConsumerScope{ String ACTIVITY_NAME = ActivityNames.KAFKA_COMMIT; @Override default String getActivityName() { return ACTIVITY_NAME; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 4a8e9c7cd..b0b593786 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -2,9 +2,14 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.MeterProvider; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; @@ -13,17 +18,17 @@ import java.time.Instant; public class IReplayContexts { - - public static class ScopeNames { - private ScopeNames() {} - - public static final String KAFKA_RECORD_SCOPE = "KafkaRecord"; - public static final String TRAFFIC_STREAM_LIFETIME_SCOPE = "TrafficStreamLifetime"; - public static final String ACCUMULATOR_SCOPE = "Accumulator"; - public static final String HTTP_TRANSFORMER_SCOPE = "HttpTransformer"; - public static final String REQUEST_SENDER_SCOPE = "RequestSender"; - public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; - } +// +// public static class ScopeNames { +// private ScopeNames() {} +// +// public static final String KAFKA_RECORD_SCOPE = "KafkaRecord"; +// public static final String TRAFFIC_STREAM_LIFETIME_SCOPE = "TrafficStreamLifetime"; +// public static final String ACCUMULATOR_SCOPE = "Accumulator"; +// public static final String HTTP_TRANSFORMER_SCOPE = "HttpTransformer"; +// public static final String REQUEST_SENDER_SCOPE = "RequestSender"; +// public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; +// } public static class ActivityNames { private ActivityNames() {} @@ -44,6 +49,7 @@ private ActivityNames() {} } public static class MetricNames { + private MetricNames() {} public static final String KAFKA_RECORD_READ = "kafkaRecordsRead"; public static final String KAFKA_BYTES_READ = "kafkaBytesRead"; public static final String TRAFFIC_STREAMS_READ = "trafficStreamsRead"; @@ -70,11 +76,31 @@ public static class MetricNames { public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget"; } - public interface IChannelKeyContext extends IConnectionContext { - String SCOPE_NAME = "Connection"; + public interface IAccumulationScope extends IScopedInstrumentationAttributes { + String SCOPE_NAME2 = "Replay"; @Override - default String getActivityName() { return ActivityNames.CHANNEL; } + 
default String getScopeName() { + return SCOPE_NAME2; + } + } + + public interface IChannelKeyContext + extends IAccumulationScope, + IConnectionContext { + String ACTIVITY_NAME = ActivityNames.CHANNEL; + + class MetricInstruments extends CommonScopedMetricInstruments { + final LongUpDownCounter activeChannelCounter; + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + activeChannelCounter = meter + .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); + } + } + + @Override default String getActivityName() { return ACTIVITY_NAME;} // do not add this as a property // because its components are already being added in the IConnectionContext implementation @@ -93,31 +119,43 @@ default String getNodeId() { void onTargetConnectionClosed(); } - public interface IKafkaRecordContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope + public interface IKafkaRecordContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { - String SCOPE_NAME = ScopeNames.KAFKA_RECORD_SCOPE; + String ACTIVITY_NAME = ActivityNames.RECORD_LIFETIME; - @Override - default String getActivityName() { return ActivityNames.RECORD_LIFETIME; } + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + @Override default String getActivityName() { return ACTIVITY_NAME;} static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); String getRecordId(); default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IScopedInstrumentationAttributes.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); + return IAccumulationScope.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); } } - public interface ITrafficStreamsLifecycleContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.TRAFFIC_STREAM_LIFETIME_SCOPE; - default String getActivityName() { return ActivityNames.TRAFFIC_STREAM_LIFETIME; } + public interface ITrafficStreamsLifecycleContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.TRAFFIC_STREAM_LIFETIME; + + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + @Override default String getActivityName() { return ACTIVITY_NAME;} ITrafficStreamKey getTrafficStreamKey(); - IChannelKeyContext getChannelKeyContext(); + IChannelKeyContext getChannelKeyContext(); default String getConnectionId() { return getChannelKey().getConnectionId(); } @@ -126,16 +164,25 @@ default ISourceTrafficChannelKey getChannelKey() { } } - public interface IReplayerHttpTransactionContext - extends IHttpTransactionContext, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.ACCUMULATOR_SCOPE; + public interface IReplayerHttpTransactionContext + extends IHttpTransactionContext, + IAccumulationScope, + IWithTypedEnclosingScope> { AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); - default String getActivityName() { return ActivityNames.HTTP_TRANSACTION; } + ITupleHandlingContext createTupleContext(); + + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + 
super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + String ACTIVITY_NAME = ActivityNames.HTTP_TRANSACTION; + @Override default String getActivityName() { return ACTIVITY_NAME;} UniqueReplayerRequestKey getReplayerRequestKey(); - IChannelKeyContext getChannelKeyContext(); + IChannelKeyContext getChannelKeyContext(); Instant getTimeOfOriginalRequest(); default String getConnectionId() { @@ -160,26 +207,48 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } } - public interface IRequestAccumulationContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.ACCUMULATOR_SCOPE; - default String getActivityName() { return ActivityNames.ACCUMULATING_REQUEST; } + public interface IRequestAccumulationContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.ACCUMULATING_REQUEST; + + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + @Override + default String getActivityName() { return ACTIVITY_NAME;} } - public interface IResponseAccumulationContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.ACCUMULATOR_SCOPE; - default String getActivityName() { return ActivityNames.ACCUMULATING_RESPONSE; } + public interface IResponseAccumulationContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.ACCUMULATING_RESPONSE; + + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + @Override + default String getActivityName() { return ACTIVITY_NAME;} } - public interface IRequestTransformationContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.HTTP_TRANSFORMER_SCOPE; + public interface IRequestTransformationContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.TRANSFORMATION; + + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } - default String getActivityName() { return ActivityNames.TRANSFORMATION; } + @Override default String getActivityName() { return ACTIVITY_NAME;} void onHeaderParse(); void onPayloadParse(); @@ -201,48 +270,57 @@ public interface IRequestTransformationContext void aggregateOutputChunk(int sizeInBytes); } - public interface IScheduledContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; - default String getActivityName() { return ActivityNames.SCHEDULED; } + public interface IScheduledContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.SCHEDULED; + + class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + @Override + default String getActivityName() { return ACTIVITY_NAME;} } - public interface ITargetRequestContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; - default String getActivityName() { 
return ActivityNames.TARGET_TRANSACTION; } + public interface ITargetRequestContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.TARGET_TRANSACTION; + + @Override default String getActivityName() { return ACTIVITY_NAME;} void onBytesSent(int size); void onBytesReceived(int size); } - public interface IRequestSendingContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; - default String getActivityName() { return ActivityNames.REQUEST_SENDING; } + public interface IRequestSendingContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.REQUEST_SENDING; + @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface IWaitingForHttpResponseContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; - default String getActivityName() { return ActivityNames.WAITING_FOR_RESPONSE; } + public interface IWaitingForHttpResponseContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.WAITING_FOR_RESPONSE; + @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface IReceivingHttpResponseContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.REQUEST_SENDER_SCOPE; - default String getActivityName() { return ActivityNames.RECEIVING_RESPONSE; } + public interface IReceivingHttpResponseContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.RECEIVING_RESPONSE; + @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface ITupleHandlingContext - extends IScopedInstrumentationAttributes, - IWithTypedEnclosingScope { - String SCOPE_NAME = ScopeNames.TRAFFIC_REPLAYER_SCOPE; - default String getActivityName() { return ActivityNames.TUPLE_HANDLING; } + public interface ITupleHandlingContext + extends IAccumulationScope, + IWithTypedEnclosingScope> { + String ACTIVITY_NAME = ActivityNames.TUPLE_HANDLING; + @Override default String getActivityName() { return ACTIVITY_NAME; } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java index f2dd17f89..93e0e0cc8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java @@ -3,5 +3,7 @@ import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; -public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { +public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { + + TrafficSourceContexts.ReadChunkContext createReadChunkContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java index 64fff2976..c3621740f 100644 --- 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.replay.tracing; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface ITrafficSourceContexts { @@ -15,18 +16,23 @@ private ActivityNames() {} public static final String WAIT_FOR_NEXT_BACK_PRESSURE_CHECK = "waitForNextBackPressureCheck"; } - interface ITrafficSourceContext extends IScopedInstrumentationAttributes { + interface ITrafficSourceContext extends IScopedInstrumentationAttributes { String SCOPE_NAME = ScopeNames.TRAFFIC_SCOPE; + @Override default String getScopeName() { return SCOPE_NAME; } + } - interface IReadChunkContext extends ITrafficSourceContext { + interface IReadChunkContext extends ITrafficSourceContext { + String ACTIVITY_NAME = ActivityNames.READ_NEXT_TRAFFIC_CHUNK; @Override - default String getActivityName() { return ActivityNames.READ_NEXT_TRAFFIC_CHUNK; } + default String getActivityName() { return ACTIVITY_NAME; } } - interface IBackPressureBlockContext extends ITrafficSourceContext { + interface IBackPressureBlockContext extends ITrafficSourceContext { + String ACTIVITY_NAME = ActivityNames.BACK_PRESSURE_BLOCK; @Override - default String getActivityName() { return ActivityNames.BACK_PRESSURE_BLOCK; } + default String getActivityName() { return ACTIVITY_NAME; } } - interface IWaitForNextSignal extends ITrafficSourceContext { - default String getActivityName() { return ActivityNames.WAIT_FOR_NEXT_BACK_PRESSURE_CHECK; } + interface IWaitForNextSignal extends ITrafficSourceContext { + String ACTIVITY_NAME = ActivityNames.WAIT_FOR_NEXT_BACK_PRESSURE_CHECK; + default String getActivityName() { return ACTIVITY_NAME; } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 08fb8ab5c..128b8f85a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -67,7 +67,7 @@ private void onParitionsAssignedChanged(int delta) { public static class TouchScopeContext extends DirectNestedSpanContext> - implements IKafkaConsumerContexts.ITouchScopeContext + implements IKafkaConsumerContexts.ITouchScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { @@ -85,19 +85,20 @@ public TouchScopeContext(@NonNull IInstrumentationAttributes> - implements IKafkaConsumerContexts.IPollScopeContext { - @Override - public CommonScopedMetricInstruments getMetrics() { - return getRootInstrumentationScope().poll; - } - + extends DirectNestedSpanContext> + implements IKafkaConsumerContexts.IPollScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); } } - public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + + @Override + public CommonScopedMetricInstruments getMetrics() { + return 
getRootInstrumentationScope().pollInstruments; + } + + public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -105,41 +106,46 @@ public PollScopeContext(@NonNull IInstrumentationAttributes> - implements IKafkaConsumerContexts.ICommitScopeContext { - public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { - super(enclosingScope); - initializeSpan(); + extends DirectNestedSpanContext> + implements IKafkaConsumerContexts.ICommitScopeContext { + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } } @Override - public DoubleHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getCommitDuration(); + public MetricInstruments getMetrics() { + return getRootInstrumentationScope().commitInstruments; } - @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getCommitCounter(); + public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + super(enclosingScope); + initializeSpan(); } + } public static class KafkaCommitScopeContext - extends DirectNestedSpanContext - implements IKafkaConsumerContexts.IKafkaCommitScopeContext { - - public KafkaCommitScopeContext(@NonNull KafkaConsumerContexts.CommitScopeContext enclosingScope) { - super(enclosingScope); - initializeSpan(); + extends DirectNestedSpanContext + implements IKafkaConsumerContexts.IKafkaCommitScopeContext { + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } } @Override - public DoubleHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getKafkaCommitDuration(); + public MetricInstruments getMetrics() { + return getRootInstrumentationScope().kafkaCommitInstruments; } - @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getKafkaCommitCounter(); + + public KafkaCommitScopeContext(@NonNull KafkaConsumerContexts.CommitScopeContext enclosingScope) { + super(enclosingScope); + initializeSpan(); } + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index bd38c2d6e..3f5b4c8a2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -1,12 +1,16 @@ package org.opensearch.migrations.replay.tracing; -import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; +import lombok.NonNull; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import 
org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IndirectNestedSpanContext; @@ -16,31 +20,37 @@ public class ReplayContexts { + public static final String COUNT_UNIT_STR = "count"; + public static final String BYTES_UNIT_STR = "bytes"; + private ReplayContexts() {} public static class ChannelKeyContext - extends AbstractNestedSpanContext> - implements IReplayContexts.IChannelKeyContext { - + extends AbstractNestedSpanContext> + implements IReplayContexts.IChannelKeyContext { @Getter final ISourceTrafficChannelKey channelKey; - - @Override - public DoubleHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getChannelDuration(); - } - @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getChannelCounter(); - } - - public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { + public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { super(enclosingScope); this.channelKey = channelKey; initializeSpan(); } + public static class MetricInstruments extends CommonScopedMetricInstruments { + final LongUpDownCounter activeChannelCounter; + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + activeChannelCounter = meter + .upDownCounterBuilder(IReplayContexts.MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().channelKeyContext; + } + @Override public String toString() { return channelKey.toString(); @@ -48,91 +58,103 @@ public String toString() { @Override public void onTargetConnectionCreated() { - meterDeltaEvent(getRootInstrumentationScope().getActiveChannelsCounter(), 1); + meterDeltaEvent(getMetrics().activeChannelCounter, 1); } @Override public void onTargetConnectionClosed() { - meterDeltaEvent(getRootInstrumentationScope().getActiveChannelsCounter(), -1); + meterDeltaEvent(getMetrics().activeChannelCounter, -1); } } public static class KafkaRecordContext - extends DirectNestedSpanContext - implements IReplayContexts.IKafkaRecordContext { + extends DirectNestedSpanContext> + implements IReplayContexts.IKafkaRecordContext { final String recordId; - public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, String recordId, - int recordSize) { + public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, + String recordId, int recordSize) { super(enclosingScope); this.recordId = recordId; initializeSpan(); - this.meterIncrementEvent(getRootInstrumentationScope().getKafkaRecordCounter()); - this.meterIncrementEvent(getRootInstrumentationScope().getKafkaRecordBytesCounter(), recordSize); + meterIncrementEvent(getMetrics().recordCounter); + meterIncrementEvent(getMetrics().bytesCounter, recordSize); } - @Override - public String getRecordId() { - return recordId; + public static class MetricInstruments extends CommonScopedMetricInstruments { + final LongCounter recordCounter; + final LongCounter bytesCounter; + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + recordCounter = meter.counterBuilder(IReplayContexts.MetricNames.KAFKA_RECORD_READ) + .setUnit("records").build(); + 
bytesCounter = meter.counterBuilder(IReplayContexts.MetricNames.KAFKA_BYTES_READ) + .setUnit(BYTES_UNIT_STR).build(); + } } - @Override - public DoubleHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getKafkaRecordDuration(); + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().kafkaRecordContext; } @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getKafkaRecordCounter(); + public String getRecordId() { + return recordId; } } public static class TrafficStreamsLifecycleContext - extends IndirectNestedSpanContext - implements IReplayContexts.ITrafficStreamsLifecycleContext { + extends IndirectNestedSpanContext, IReplayContexts.IChannelKeyContext> + implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; - public TrafficStreamsLifecycleContext(IReplayContexts.IKafkaRecordContext enclosingScope, + public TrafficStreamsLifecycleContext(IReplayContexts.IKafkaRecordContext enclosingScope, ITrafficStreamKey trafficStreamKey) { super(enclosingScope); this.trafficStreamKey = trafficStreamKey; initializeSpan(); - this.meterIncrementEvent(IReplayContexts.MetricNames.TRAFFIC_STREAMS_READ); + meterIncrementEvent(getMetrics().streamsRead); } - @Override - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { - return getLogicalEnclosingScope(); + public static class MetricInstruments extends CommonScopedMetricInstruments { + private final LongCounter streamsRead; + + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + streamsRead = meter.counterBuilder(IReplayContexts.MetricNames.TRAFFIC_STREAMS_READ) + .setUnit("objects").build(); + } } - @Override - public ITrafficStreamKey getTrafficStreamKey() { - return trafficStreamKey; + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().trafficStreamLifecycleContext; } @Override - public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { - return getImmediateEnclosingScope().getLogicalEnclosingScope(); + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + return getLogicalEnclosingScope(); } @Override - public DoubleHistogram getEndOfScopeDurationMetric() { - return getRootInstrumentationScope().getTrafficStreamLifecycleDuration(); + public ITrafficStreamKey getTrafficStreamKey() { + return trafficStreamKey; } @Override - public LongCounter getEndOfScopeCountMetric() { - return getRootInstrumentationScope().getTrafficStreamLifecycleCounter(); + public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { + return getImmediateEnclosingScope().getLogicalEnclosingScope(); } } public static class HttpTransactionContext - extends IndirectNestedSpanContext - implements IReplayContexts.IReplayerHttpTransactionContext { + extends IndirectNestedSpanContext, IReplayContexts.IChannelKeyContext> + implements IReplayContexts.IReplayerHttpTransactionContext { final UniqueReplayerRequestKey replayerRequestKey; @Getter final Instant timeOfOriginalRequest; - public HttpTransactionContext(IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, + public HttpTransactionContext(IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, UniqueReplayerRequestKey replayerRequestKey, Instant timeOfOriginalRequest) { super(enclosingScope); @@ -141,7 +163,22 @@ public 
HttpTransactionContext(IReplayContexts.ITrafficStreamsLifecycleContext en initializeSpan(); } - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + @Override + public IReplayContexts.ITupleHandlingContext createTupleContext() { + return new ReplayContexts.TupleHandlingContext(this); + } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().httpTransactionContext; + } + + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getLogicalEnclosingScope(); } @@ -156,171 +193,315 @@ public String toString() { } @Override - public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { + public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { return getImmediateEnclosingScope().getLogicalEnclosingScope(); } } public static class RequestAccumulationContext - extends DirectNestedSpanContext - implements IReplayContexts.IRequestAccumulationContext { - public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.IRequestAccumulationContext { + public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().requestAccumContext; + } } public static class ResponseAccumulationContext - extends DirectNestedSpanContext - implements IReplayContexts.IResponseAccumulationContext { - public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.IResponseAccumulationContext { + public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().responseAccumContext; + } } public static class RequestTransformationContext - extends DirectNestedSpanContext - implements IReplayContexts.IRequestTransformationContext { - public RequestTransformationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.IRequestTransformationContext { + public RequestTransformationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } - @Override - public void onHeaderParse() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_HEADER_PARSE); - } - @Override - public void onPayloadParse() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED); - } - @Override - public void onPayloadParseSuccess() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_SUCCESS); - } - @Override - public void 
onJsonPayloadParseRequired() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_JSON_REQUIRED); - } - @Override - public void onJsonPayloadParseSucceeded() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_JSON_SUCCEEDED); - } - @Override - public void onPayloadBytesIn(int inputSize) { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_BYTES_IN, inputSize); - } - @Override - public void onUncompressedBytesIn(int inputSize) { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_IN, inputSize); - } - @Override - public void onUncompressedBytesOut(int inputSize) { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_OUT, inputSize); - } - @Override - public void onFinalBytesOut(int inputSize) { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_FINAL_PAYLOAD_BYTES_OUT, inputSize); - } - @Override - public void onTransformSuccess() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_SUCCESS); - } - @Override - public void onTransformSkip() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_SKIPPED); - } - @Override - public void onTransformFailure() { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_ERROR); - } - @Override - public void aggregateInputChunk(int sizeInBytes) { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_BYTES_IN, sizeInBytes); - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_IN); - } - @Override - public void aggregateOutputChunk(int sizeInBytes) { - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_BYTES_OUT, sizeInBytes); - meterIncrementEvent(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_OUT); + public static class MetricInstruments extends CommonScopedMetricInstruments { + private final LongCounter headerParses; + private final LongCounter payloadParses; + private final LongCounter payloadSuccessParses; + private final LongCounter jsonPayloadParses; + private final LongCounter jsonTransformSuccess; + private final LongCounter payloadBytesIn; + private final LongCounter uncompressedBytesIn; + private final LongCounter uncompressedBytesOut; + private final LongCounter finalPayloadBytesOut; + private final LongCounter transformSuccess; + private final LongCounter transformSkipped; + private final LongCounter transformError; + private final LongCounter transformBytesIn; + private final LongCounter transformChunksIn; + private final LongCounter transformBytesOut; + private final LongCounter transformChunksOut; + + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + headerParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_HEADER_PARSE) + .setUnit(COUNT_UNIT_STR).build(); + payloadParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED) + .setUnit(COUNT_UNIT_STR).build(); + payloadSuccessParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_SUCCESS) + .setUnit(COUNT_UNIT_STR).build(); + jsonPayloadParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_JSON_REQUIRED) + .setUnit(COUNT_UNIT_STR).build(); + jsonTransformSuccess = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_JSON_SUCCEEDED) + .setUnit(COUNT_UNIT_STR).build(); + payloadBytesIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_BYTES_IN) + .setUnit(BYTES_UNIT_STR).build(); + uncompressedBytesIn = 
meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_IN) + .setUnit(BYTES_UNIT_STR).build(); + uncompressedBytesOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_OUT) + .setUnit(BYTES_UNIT_STR).build(); + finalPayloadBytesOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_FINAL_PAYLOAD_BYTES_OUT) + .setUnit(BYTES_UNIT_STR).build(); + transformSuccess = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_SUCCESS) + .setUnit(COUNT_UNIT_STR).build(); + transformSkipped = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_SKIPPED) + .setUnit(COUNT_UNIT_STR).build(); + transformError = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_ERROR) + .setUnit(COUNT_UNIT_STR).build(); + transformBytesIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_BYTES_IN) + .setUnit(BYTES_UNIT_STR).build(); + transformChunksIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_IN) + .setUnit(COUNT_UNIT_STR).build(); + transformBytesOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_BYTES_OUT) + .setUnit(BYTES_UNIT_STR).build(); + transformChunksOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_OUT) + .setUnit(COUNT_UNIT_STR).build(); + + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().transformationContext; + } + + @Override public void onHeaderParse() { + meterIncrementEvent(getMetrics().headerParses); + } + @Override public void onPayloadParse() { + meterIncrementEvent(getMetrics().payloadParses); + } + @Override public void onPayloadParseSuccess() { + meterIncrementEvent(getMetrics().payloadSuccessParses); + } + @Override public void onJsonPayloadParseRequired() { + meterIncrementEvent(getMetrics().jsonPayloadParses); + } + @Override public void onJsonPayloadParseSucceeded() { + meterIncrementEvent(getMetrics().jsonTransformSuccess); + } + @Override public void onPayloadBytesIn(int inputSize) { + meterIncrementEvent(getMetrics().payloadBytesIn, inputSize); + } + @Override public void onUncompressedBytesIn(int inputSize) { + meterIncrementEvent(getMetrics().uncompressedBytesIn, inputSize); + } + @Override public void onUncompressedBytesOut(int inputSize) { + meterIncrementEvent(getMetrics().uncompressedBytesOut, inputSize); + } + @Override public void onFinalBytesOut(int inputSize) { + meterIncrementEvent(getMetrics().finalPayloadBytesOut, inputSize); + } + @Override public void onTransformSuccess() { + meterIncrementEvent(getMetrics().transformSuccess); + } + @Override public void onTransformSkip() { + meterIncrementEvent(getMetrics().transformSkipped); + } + @Override public void onTransformFailure() { + meterIncrementEvent(getMetrics().transformError); + } + @Override public void aggregateInputChunk(int sizeInBytes) { + meterIncrementEvent(getMetrics().transformBytesIn, sizeInBytes); + meterIncrementEvent(getMetrics().transformChunksIn); + } + @Override public void aggregateOutputChunk(int sizeInBytes) { + meterIncrementEvent(getMetrics().transformBytesOut, sizeInBytes); + meterIncrementEvent(getMetrics().transformChunksOut); } } public static class ScheduledContext - extends DirectNestedSpanContext - implements IReplayContexts.IScheduledContext { + extends DirectNestedSpanContext> + implements IReplayContexts.IScheduledContext { private final Instant scheduledFor; - public ScheduledContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope, + public 
ScheduledContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope, Instant scheduledFor) { super(enclosingScope); this.scheduledFor = scheduledFor; initializeSpan(); } + public static class MetricInstruments extends CommonScopedMetricInstruments { + DoubleHistogram lag; + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + lag = meter.histogramBuilder(IReplayContexts.MetricNames.NETTY_SCHEDULE_LAG).setUnit("ms").build(); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().scheduledContext; + } + @Override public void sendMeterEventsForEnd() { super.sendMeterEventsForEnd(); - meterHistogramMillis(IReplayContexts.MetricNames.NETTY_SCHEDULE_LAG, - Duration.between(scheduledFor, Instant.now())); - + meterHistogramMillis(getMetrics().lag, Duration.between(scheduledFor, Instant.now())); } } public static class TargetRequestContext - extends DirectNestedSpanContext - implements IReplayContexts.ITargetRequestContext { - public TargetRequestContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.ITargetRequestContext { + public TargetRequestContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); - meterHistogramMillis(IReplayContexts.MetricNames.SOURCE_TO_TARGET_REQUEST_LAG, + meterHistogramMillis(getMetrics().sourceTargetGap, Duration.between(enclosingScope.getTimeOfOriginalRequest(), Instant.now())); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + + private final DoubleHistogram sourceTargetGap; + private final LongCounter bytesWritten; + private final LongCounter bytesRead; + + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + sourceTargetGap = meter.histogramBuilder(IReplayContexts.MetricNames.SOURCE_TO_TARGET_REQUEST_LAG) + .setUnit("ms").build(); + bytesWritten = meter.counterBuilder(IReplayContexts.MetricNames.BYTES_WRITTEN_TO_TARGET) + .setUnit(BYTES_UNIT_STR).build(); + bytesRead = meter.counterBuilder(IReplayContexts.MetricNames.BYTES_READ_FROM_TARGET) + .setUnit(BYTES_UNIT_STR).build(); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().targetRequestContext; + } + @Override public void onBytesSent(int size) { - meterIncrementEvent(IReplayContexts.MetricNames.BYTES_WRITTEN_TO_TARGET, size); + meterIncrementEvent(getMetrics().bytesWritten, size); } @Override public void onBytesReceived(int size) { - meterIncrementEvent(IReplayContexts.MetricNames.BYTES_READ_FROM_TARGET, size); + meterIncrementEvent(getMetrics().bytesRead, size); } } public static class RequestSendingContext - extends DirectNestedSpanContext - implements IReplayContexts.IRequestSendingContext { - public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.IRequestSendingContext { + public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments 
getMetrics() { + return getRootInstrumentationScope().requestSendingContext; + } } public static class WaitingForHttpResponseContext - extends DirectNestedSpanContext - implements IReplayContexts.IWaitingForHttpResponseContext { - public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.IWaitingForHttpResponseContext { + public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().waitingForHttpResponseContext; + } + } public static class ReceivingHttpResponseContext - extends DirectNestedSpanContext - implements IReplayContexts.IReceivingHttpResponseContext { - public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.IReceivingHttpResponseContext { + public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().receivingHttpContext; + } + } public static class TupleHandlingContext - extends DirectNestedSpanContext - implements IReplayContexts.ITupleHandlingContext { - public TupleHandlingContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext> + implements IReplayContexts.ITupleHandlingContext { + public TupleHandlingContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + } + } + + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().tupleHandlingContext; + } + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 387b84be1..d0571aae1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -5,21 +5,74 @@ import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.MeterProvider; +import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; +import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.tracing.RootOtelContext; import lombok.Getter; @Getter -public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { +public class RootReplayerContext extends RootOtelContext implements 
IRootReplayerContext { public final KafkaConsumerContexts.AsyncListeningContext.MetricInstruments asyncListeningInstruments; public final KafkaConsumerContexts.TouchScopeContext.MetricInstruments touchInstruments; public final KafkaConsumerContexts.PollScopeContext.MetricInstruments pollInstruments; + public final KafkaConsumerContexts.CommitScopeContext.MetricInstruments commitInstruments; + public final KafkaConsumerContexts.KafkaCommitScopeContext.MetricInstruments kafkaCommitInstruments; + + public final TrafficSourceContexts.ReadChunkContext.MetricInstruments readChunkInstruments; + public final TrafficSourceContexts.BackPressureBlockContext.MetricInstruments backPressureInstruments; + public final TrafficSourceContexts.WaitForNextSignal.MetricInstruments waitForNextSignalInstruments; + + public final ReplayContexts.ChannelKeyContext.MetricInstruments channelKeyContext; + public final ReplayContexts.KafkaRecordContext.MetricInstruments kafkaRecordContext; + public final ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments trafficStreamLifecycleContext; + public final ReplayContexts.HttpTransactionContext.MetricInstruments httpTransactionContext; + public final ReplayContexts.RequestAccumulationContext.MetricInstruments requestAccumContext; + public final ReplayContexts.ResponseAccumulationContext.MetricInstruments responseAccumContext; + public final ReplayContexts.RequestTransformationContext.MetricInstruments transformationContext; + public final ReplayContexts.ScheduledContext.MetricInstruments scheduledContext; + public final ReplayContexts.TargetRequestContext.MetricInstruments targetRequestContext; + public final ReplayContexts.RequestSendingContext.MetricInstruments requestSendingContext; + public final ReplayContexts.WaitingForHttpResponseContext.MetricInstruments waitingForHttpResponseContext; + public final ReplayContexts.ReceivingHttpResponseContext.MetricInstruments receivingHttpContext; + public final ReplayContexts.TupleHandlingContext.MetricInstruments tupleHandlingContext; + + public final InputStreamOfTraffic.IOSTrafficStreamContext.MetricInstruments directInputStreamContext; public RootReplayerContext(OpenTelemetry sdk) { super(sdk); var meterProvider = this.getMeterProvider(); + asyncListeningInstruments = new KafkaConsumerContexts.AsyncListeningContext.MetricInstruments(meterProvider); touchInstruments = new KafkaConsumerContexts.TouchScopeContext.MetricInstruments(meterProvider); + pollInstruments = new KafkaConsumerContexts.PollScopeContext.MetricInstruments(meterProvider); + commitInstruments = new KafkaConsumerContexts.CommitScopeContext.MetricInstruments(meterProvider); + kafkaCommitInstruments = new KafkaConsumerContexts.KafkaCommitScopeContext.MetricInstruments(meterProvider); + + directInputStreamContext = new InputStreamOfTraffic.IOSTrafficStreamContext.MetricInstruments(meterProvider); + + readChunkInstruments = new TrafficSourceContexts.ReadChunkContext.MetricInstruments(meterProvider); + backPressureInstruments = new TrafficSourceContexts.BackPressureBlockContext.MetricInstruments(meterProvider); + waitForNextSignalInstruments = new TrafficSourceContexts.WaitForNextSignal.MetricInstruments(meterProvider); + + + channelKeyContext = new ReplayContexts.ChannelKeyContext.MetricInstruments(meterProvider); + kafkaRecordContext = new ReplayContexts.KafkaRecordContext.MetricInstruments(meterProvider); + trafficStreamLifecycleContext = new ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments(meterProvider); + httpTransactionContext = new 
ReplayContexts.HttpTransactionContext.MetricInstruments(meterProvider); + requestAccumContext = new ReplayContexts.RequestAccumulationContext.MetricInstruments(meterProvider); + responseAccumContext = new ReplayContexts.ResponseAccumulationContext.MetricInstruments(meterProvider); + transformationContext = new ReplayContexts.RequestTransformationContext.MetricInstruments(meterProvider); + scheduledContext = new ReplayContexts.ScheduledContext.MetricInstruments(meterProvider); + targetRequestContext = new ReplayContexts.TargetRequestContext.MetricInstruments(meterProvider); + requestSendingContext = new ReplayContexts.RequestSendingContext.MetricInstruments(meterProvider); + waitingForHttpResponseContext = new ReplayContexts.WaitingForHttpResponseContext.MetricInstruments(meterProvider); + receivingHttpContext = new ReplayContexts.ReceivingHttpResponseContext.MetricInstruments(meterProvider); + tupleHandlingContext = new ReplayContexts.TupleHandlingContext.MetricInstruments(meterProvider); + } + @Override + public TrafficSourceContexts.ReadChunkContext createReadChunkContext() { + return new TrafficSourceContexts.ReadChunkContext(this); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index dd5445c50..820e7b2ac 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -1,6 +1,8 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.metrics.MeterProvider; import lombok.NonNull; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -8,30 +10,59 @@ public class TrafficSourceContexts { private TrafficSourceContexts() {} - public static class ReadChunkContext - extends DirectNestedSpanContext - implements ITrafficSourceContexts.IReadChunkContext + public static class ReadChunkContext + extends DirectNestedSpanContext> + implements ITrafficSourceContexts.IReadChunkContext { - public ReadChunkContext(T enclosingScope) { + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } + } + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().readChunkInstruments; + } + + public ReadChunkContext(IInstrumentationAttributes enclosingScope) { super(enclosingScope); initializeSpan(); } } public static class BackPressureBlockContext - extends DirectNestedSpanContext - implements ITrafficSourceContexts.IBackPressureBlockContext + extends DirectNestedSpanContext> + implements ITrafficSourceContexts.IBackPressureBlockContext { - public BackPressureBlockContext(@NonNull ITrafficSourceContexts.IReadChunkContext enclosingScope) { + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } + } + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().backPressureInstruments; + } + + public BackPressureBlockContext(@NonNull 
ITrafficSourceContexts.IReadChunkContext enclosingScope) { super(enclosingScope); initializeSpan(); } } public static class WaitForNextSignal - extends DirectNestedSpanContext - implements ITrafficSourceContexts.IWaitForNextSignal { - public WaitForNextSignal(@NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { + extends DirectNestedSpanContext> + implements ITrafficSourceContexts.IWaitForNextSignal { + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + } + } + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().waitForNextSignalInstruments; + } + + public WaitForNextSignal(@NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { super(enclosingScope); initializeSpan(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index cb2065fc5..635a381db 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -1,11 +1,15 @@ package org.opensearch.migrations.replay.traffic.source; import com.google.protobuf.Timestamp; +import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.tracing.TrafficSourceContexts; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.slf4j.event.Level; @@ -44,6 +48,7 @@ public class BlockingTrafficSource implements ITrafficCaptureSource, BufferedFlo * Limit the number of readers to one at a time and only if we haven't yet maxed out our time buffer */ private final Semaphore readGate; + @Getter private final Duration bufferTimeWindow; public BlockingTrafficSource(ISimpleTrafficCaptureSource underlying, Duration bufferTimeWindow) { @@ -78,10 +83,6 @@ public void stopReadsPast(Instant pointInTime) { } } - public Duration getBufferTimeWindow() { - return bufferTimeWindow; - } - /** * Reads the next chunk that is available before the current stopReading barrier. However, * that barrier isn't meant to be a tight barrier with immediate effect. 
@@ -90,8 +91,8 @@ public Duration getBufferTimeWindow() { */ @Override public CompletableFuture> - readNextTrafficStreamChunk(IInstrumentationAttributes context) { - var readContext = new TrafficSourceContexts.ReadChunkContext(context); + readNextTrafficStreamChunk(IRootReplayerContext context) { + var readContext = context.createReadChunkContext(); log.info("BlockingTrafficSource::readNext"); var trafficStreamListFuture = CompletableFuture .supplyAsync(() -> blockIfNeeded(readContext), task -> new Thread(task).start()) @@ -117,11 +118,11 @@ public Duration getBufferTimeWindow() { }); } - private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) { + private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) { if (stopReadingAtRef.get().equals(Instant.EPOCH)) { return null; } log.atInfo().setMessage(() -> "stopReadingAtRef=" + stopReadingAtRef + " lastTimestampSecondsRef=" + lastTimestampSecondsRef).log(); - ITrafficSourceContexts.IBackPressureBlockContext blockContext = null; + ITrafficSourceContexts.IBackPressureBlockContext blockContext = null; while (stopReadingAtRef.get().isBefore(lastTimestampSecondsRef.get())) { if (blockContext == null) { blockContext = new TrafficSourceContexts.BackPressureBlockContext(readContext); @@ -167,7 +168,7 @@ private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) } @Override - public CommitResult commitTrafficStream(IInstrumentationAttributes context, + public CommitResult commitTrafficStream(IInstrumentationAttributes context, ITrafficStreamKey trafficStreamKey) throws IOException { var commitResult = underlyingSource.commitTrafficStream(context, trafficStreamKey); if (commitResult == CommitResult.AfterNextRead) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java index df28d5aed..4c76cf2ad 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay.traffic.source; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; @@ -17,12 +18,12 @@ enum CommitResult { Immediate, AfterNextRead, BlockedByOtherCommits, Ignored } - CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context); + CompletableFuture> readNextTrafficStreamChunk(IRootReplayerContext context); /** * Returns true if the committed results are immediate */ - CommitResult commitTrafficStream(IInstrumentationAttributes context, + CommitResult commitTrafficStream(IInstrumentationAttributes context, ITrafficStreamKey trafficStreamKey) throws IOException; default void close() throws IOException {} @@ -31,7 +32,7 @@ default void close() throws IOException {} * Keep-alive call to be used by the BlockingTrafficSource to keep this connection alive if * this is required. 
*/ - default void touch(IInstrumentationAttributes context) {} + default void touch(IInstrumentationAttributes context) {} /** * @return The time that the next call to touch() must be completed for this source to stay diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 5c88eda46..076718fd6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -1,5 +1,7 @@ package org.opensearch.migrations.replay.traffic.source; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; import lombok.Lombok; import lombok.NonNull; @@ -8,6 +10,9 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -22,34 +27,46 @@ @Slf4j public class InputStreamOfTraffic implements ISimpleTrafficCaptureSource { - private static final String TELEMETRY_SCOPE_NAME = "InputStreamOfTraffic"; private final InputStream inputStream; private final AtomicInteger trafficStreamsRead = new AtomicInteger(); private final ChannelContextManager channelContextManager; - public InputStreamOfTraffic(IInstrumentationAttributes context, InputStream inputStream) { + public InputStreamOfTraffic(IInstrumentationAttributes context, InputStream inputStream) { this.channelContextManager = new ChannelContextManager(context); this.inputStream = inputStream; } - private static class IOSTrafficStreamContext - extends DirectNestedSpanContext - implements IReplayContexts.ITrafficStreamsLifecycleContext { - public static final String SCOPE_NAME = TELEMETRY_SCOPE_NAME; - + public static class IOSTrafficStreamContext + extends DirectNestedSpanContext> + implements IReplayContexts.ITrafficStreamsLifecycleContext { @Getter private final ITrafficStreamKey trafficStreamKey; - public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, ITrafficStreamKey tsk) { + public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, + ITrafficStreamKey tsk) { super(ctx); this.trafficStreamKey = tsk; initializeSpan(); } + public static class MetricInstruments extends CommonScopedMetricInstruments { + final LongUpDownCounter activeChannelCounter; + public MetricInstruments(MeterProvider meterProvider) { + super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + var meter = meterProvider.get(SCOPE_NAME2); + activeChannelCounter = meter + .upDownCounterBuilder(IReplayContexts.MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); + } + } + + public @NonNull ReplayContexts.ChannelKeyContext.MetricInstruments getMetrics() { + return getRootInstrumentationScope().channelKeyContext; + } + @Override public 
String getActivityName() { return "trafficStreamLifecycle"; } @Override - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getImmediateEnclosingScope(); } } @@ -61,7 +78,7 @@ public IReplayContexts.IChannelKeyContext getChannelKeyContext() { * @return */ public CompletableFuture> - readNextTrafficStreamChunk(IInstrumentationAttributes context) { + readNextTrafficStreamChunk(IInstrumentationAttributes context) { return CompletableFuture.supplyAsync(() -> { var builder = TrafficStream.newBuilder(); try { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 8251da565..c653f775c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -226,16 +226,16 @@ void generateAndTest(String testName, int bufferSize, int skipCount, new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey key, + public void onRequestReceived(@NonNull UniqueReplayerRequestKey key, IReplayContexts.IReplayerHttpTransactionContext ctx, - HttpMessageAndTimestamp request) { + @NonNull HttpMessageAndTimestamp request) { requestsReceived.incrementAndGet(); } @Override - public void onFullDataReceived(UniqueReplayerRequestKey requestKey, + public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext ctx, - RequestResponsePacketPair fullPair) { + @NonNull RequestResponsePacketPair fullPair) { var sourceIdx = requestKey.getSourceRequestIndex(); if (fullPair.completionStatus == RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index d9f9c144c..a338cf808 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -158,18 +158,18 @@ public void testReader() throws Exception { new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey id, + public void onRequestReceived(@NonNull UniqueReplayerRequestKey id, IReplayContexts.IReplayerHttpTransactionContext ctx, - HttpMessageAndTimestamp request) { + @NonNull HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(bytesList)); } @Override - public void onFullDataReceived(UniqueReplayerRequestKey key, + public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, IReplayContexts.IReplayerHttpTransactionContext ctx, - RequestResponsePacketPair fullPair) { + @NonNull 
RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); } @@ -213,18 +213,18 @@ public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { "CapturedTrafficToHttpTransactionAccumulator that's being used in this unit test!", new AccumulationCallbacks() { @Override - public void onRequestReceived(UniqueReplayerRequestKey id, + public void onRequestReceived(@NonNull UniqueReplayerRequestKey id, IReplayContexts.IReplayerHttpTransactionContext ctx, - HttpMessageAndTimestamp request) { + @NonNull HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(bytesList)); } @Override - public void onFullDataReceived(UniqueReplayerRequestKey key, + public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, IReplayContexts.IReplayerHttpTransactionContext ctx, - RequestResponsePacketPair fullPair) { + @NonNull RequestResponsePacketPair fullPair) { var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); } From 5dc32d9bf3540b280095d0fac5773bb195041728 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 11 Jan 2024 15:48:35 -0500 Subject: [PATCH 55/94] Stop passing the Root telemetry scope as a generic parameter to all of the instrumentation interfaces. That was only needed for one call, which is easily inlined into the implementation classes by putting createChildContext() calls into each parent. That allows the implementations to go back to their own root scopes and do whatever is necessary. Lots of code is a lot easier to read and maintain now. There are still other changes that I'm in the process of making, including supporting linked spans and squashing compilation errors (still). 
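To illustrate the direction of this refactoring, here is a minimal, self-contained sketch of the pattern (the names RootScope, ParentContext, ChildContext, and createChildContext are illustrative stand-ins, not the actual classes touched by this patch): rather than threading a root-scope type parameter through every instrumentation interface so callers can reach the root to construct child contexts, each parent context exposes a factory method and the concrete implementation, which already knows its concrete root, builds the child itself.

    // Before: every interface carried the root scope as a generic parameter, e.g.
    //   interface IParentContext<S extends IRootScope> { S getRootInstrumentationScope(); }
    // and callers had to know S just to construct a child context.
    //
    // After (sketch): the interfaces are generics-free; children are created via the parent.

    interface IChildContext extends AutoCloseable {
        @Override void close();              // narrow close() so no checked exception is thrown
    }

    interface IParentContext {
        IChildContext createChildContext();  // factory method replaces external construction
    }

    class RootScope {
        // stands in for the root context that owns the OpenTelemetry meters/tracers
        final String scopeName = "replayer";
    }

    class ParentContext implements IParentContext {
        private final RootScope root;

        ParentContext(RootScope root) { this.root = root; }

        @Override
        public IChildContext createChildContext() {
            // only the implementation needs to name the concrete root type
            return new ChildContext(root, this);
        }
    }

    class ChildContext implements IChildContext {
        ChildContext(RootScope root, ParentContext parent) {
            // in the real code: initialize the span and metric instruments from the root here
        }
        @Override public void close() { /* end the span */ }
    }

    class Demo {
        public static void main(String[] args) {
            var root = new RootScope();
            IParentContext parent = new ParentContext(root);
            try (var child = parent.createChildContext()) {
                // instrumented work happens while the child context (span) is open
            }
        }
    }

With this shape, callers never mention the root type at all; parentCtx.createChildContext() is all they write, which mirrors how the signatures in the diffs below drop their root-scope type parameters.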
Signed-off-by: Greg Schohn --- .../tracing/ConnectionContext.java | 49 ++++-- .../tracing/IRootOffloaderContext.java | 11 -- .../tracing/RootOffloaderContext.java | 13 +- ...ontext.java => BaseNestedSpanContext.java} | 18 +- .../tracing/DirectNestedSpanContext.java | 15 +- .../tracing/IHasRootInstrumentationScope.java | 11 ++ .../tracing/IInstrumentConstructor.java | 3 +- .../tracing/IInstrumentationAttributes.java | 8 +- .../migrations/tracing/IRootOtelContext.java | 5 +- .../IScopedInstrumentationAttributes.java | 4 +- .../tracing/IWithStartTimeAndAttributes.java | 3 +- .../tracing/IWithTypedEnclosingScope.java | 2 +- .../tracing/IndirectNestedSpanContext.java | 13 +- .../migrations/tracing/RootOtelContext.java | 31 ++-- .../commoncontexts/IConnectionContext.java | 7 +- .../IHttpTransactionContext.java | 2 +- .../replay/AccumulationCallbacks.java | 11 +- ...edTrafficToHttpTransactionAccumulator.java | 4 +- .../replay/RequestResponsePacketPair.java | 23 ++- .../replay/RequestSenderOrchestrator.java | 6 +- .../replay/TrafficCaptureSourceFactory.java | 5 +- .../migrations/replay/TrafficReplayer.java | 40 ++--- .../NettyPacketToHttpConsumer.java | 24 +-- .../http/HttpJsonTransformingConsumer.java | 16 +- ...dHttpRequestPreliminaryConvertHandler.java | 4 +- .../http/RequestPipelineOrchestrator.java | 4 +- .../datatypes/ISourceTrafficChannelKey.java | 2 +- .../kafka/KafkaTrafficCaptureSource.java | 13 +- .../replay/kafka/TrackingKafkaConsumer.java | 42 ++--- .../TrafficStreamKeyWithKafkaRecordId.java | 3 +- .../replay/tracing/ChannelContextManager.java | 6 +- .../tracing/IKafkaConsumerContexts.java | 16 +- .../replay/tracing/IReplayContexts.java | 159 +++++++----------- .../replay/tracing/IRootReplayerContext.java | 6 +- .../tracing/ITrafficSourceContexts.java | 21 ++- .../replay/tracing/KafkaConsumerContexts.java | 53 +++--- .../replay/tracing/ReplayContexts.java | 141 ++++++++++------ .../replay/tracing/RootReplayerContext.java | 13 +- .../replay/tracing/TrafficSourceContexts.java | 60 +++++-- .../traffic/source/BlockingTrafficSource.java | 33 ++-- .../source/ISimpleTrafficCaptureSource.java | 5 + .../traffic/source/ITrafficCaptureSource.java | 18 +- .../traffic/source/InputStreamOfTraffic.java | 35 ++-- 43 files changed, 533 insertions(+), 425 deletions(-) delete mode 100644 TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java rename TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/{AbstractNestedSpanContext.java => BaseNestedSpanContext.java} (66%) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 870e498d4..dff389608 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -1,18 +1,26 @@ package org.opensearch.migrations.trafficcapture.tracing; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import 
io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; -import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import lombok.NonNull; +import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.AttributeNameMatchingPredicate; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.FilteringAttributeBuilder; -import org.opensearch.migrations.tracing.IRootOtelContext; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -public class ConnectionContext extends AbstractNestedSpanContext implements IConnectionContext { +public class ConnectionContext extends BaseNestedSpanContext + implements IConnectionContext { + public static final String SCOPE_NAME = "Channel"; + private static final AttributeNameMatchingPredicate KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT = AttributeNameMatchingPredicate.builder(true).add(CONNECTION_ID_ATTR.getKey()).build(); public static final String ACTIVE_CONNECTION = "activeConnection"; + public static final String ACTIVITY_NAME = "captureConnection"; @Getter public final String connectionId; @@ -20,24 +28,35 @@ public class ConnectionContext extends AbstractNestedSpanContext> - implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, AutoCloseable { +public abstract class BaseNestedSpanContext + + implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, IHasRootInstrumentationScope, AutoCloseable { final T enclosingScope; @Getter final Instant startTime; @Getter private Span currentSpan; @Getter private final S rootInstrumentationScope; - protected AbstractNestedSpanContext(T enclosingScope) { + protected BaseNestedSpanContext(S rootScope, T enclosingScope) { this.enclosingScope = enclosingScope; this.startTime = Instant.now(); - this.rootInstrumentationScope = (S) enclosingScope.getRootInstrumentationScope(); + this.rootInstrumentationScope = rootScope; } @Override - public IInstrumentationAttributes getEnclosingScope() { + public IInstrumentationAttributes getEnclosingScope() { return enclosingScope; } @@ -34,8 +34,12 @@ protected void initializeSpan() { } protected void initializeSpan(AttributesBuilder attributesBuilder) { + initializeSpan(null, attributesBuilder); + } + + protected void initializeSpan(Span linkedSpan, AttributesBuilder attributesBuilder) { initializeSpan(rootInstrumentationScope.buildSpan(enclosingScope, getScopeName(), getActivityName(), - attributesBuilder)); + linkedSpan, attributesBuilder)); } public void initializeSpan(@NonNull Span s) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java index 926d2820c..6a2376548 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java @@ -1,16 +1,17 @@ package org.opensearch.migrations.tracing; public abstract class DirectNestedSpanContext> - extends AbstractNestedSpanContext - implements IWithTypedEnclosingScope + T extends IInstrumentationAttributes & IHasRootInstrumentationScope, + L> + extends BaseNestedSpanContext + implements IWithTypedEnclosingScope { - public DirectNestedSpanContext(T enclosingScope) { - 
super(enclosingScope); + protected DirectNestedSpanContext(T parent) { + super(parent.getRootInstrumentationScope(), parent); } @Override - public T getLogicalEnclosingScope() { - return (T) getEnclosingScope(); + public L getLogicalEnclosingScope() { + return (L) getEnclosingScope(); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java new file mode 100644 index 000000000..b9381b1cf --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java @@ -0,0 +1,11 @@ +package org.opensearch.migrations.tracing; + +import lombok.Getter; + +/** + * This exists as helper glue to make pattern matching in the generics + * work to allow for more simplified constructors. + */ +public interface IHasRootInstrumentationScope { + S getRootInstrumentationScope(); +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index a8ef5dee1..f5ed0f433 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -4,7 +4,6 @@ import io.opentelemetry.api.trace.Span; public interface IInstrumentConstructor { - - Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, + Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, Span linkedSpan, AttributesBuilder attributesBuilder); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 7e441faec..612999a2a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -9,10 +9,9 @@ import java.util.ArrayList; -public interface IInstrumentationAttributes { +public interface IInstrumentationAttributes { String getScopeName(); - IInstrumentationAttributes getEnclosingScope(); - @NonNull S getRootInstrumentationScope(); + IInstrumentationAttributes getEnclosingScope(); default Span getCurrentSpan() { return null; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { @@ -25,7 +24,7 @@ default Attributes getPopulatedAttributes(AttributesBuilder builder) { default AttributesBuilder getPopulatedAttributesBuilder(AttributesBuilder builder) { var currentObj = this; - var stack = new ArrayList>(); + var stack = new ArrayList(); while (currentObj != null) { stack.add(currentObj); currentObj = currentObj.getEnclosingScope(); @@ -59,5 +58,4 @@ default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder c.add(delta); } } - } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java index e30a1665f..c98382053 100644 --- 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java @@ -3,9 +3,6 @@ import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; -public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { +public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { MeterProvider getMeterProvider(); - default Meter getMeterForScope(String scopeName) { - return getMeterProvider().get(scopeName); - } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 7fba4c9cb..434d8cd12 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -5,8 +5,8 @@ import io.opentelemetry.api.trace.Span; import lombok.NonNull; -public interface IScopedInstrumentationAttributes - extends IWithStartTimeAndAttributes, AutoCloseable { +public interface IScopedInstrumentationAttributes + extends IWithStartTimeAndAttributes, AutoCloseable { String getActivityName(); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 582967a3b..7234f885b 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -1,13 +1,12 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.DoubleHistogram; import java.time.Duration; import java.time.Instant; -public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { +public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { Instant getStartTime(); default void meterHistogramMillis(DoubleHistogram histogram) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java index 6b4af0f34..1802e9649 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithTypedEnclosingScope.java @@ -1,5 +1,5 @@ package org.opensearch.migrations.tracing; -public interface IWithTypedEnclosingScope extends IInstrumentationAttributes { +public interface IWithTypedEnclosingScope extends IInstrumentationAttributes { T getLogicalEnclosingScope(); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java index eae8f0b4e..0d0bcda21 100644 --- 
a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java @@ -2,11 +2,14 @@ import lombok.NonNull; -public abstract class IndirectNestedSpanContext - , L extends IInstrumentationAttributes> - extends AbstractNestedSpanContext { - public IndirectNestedSpanContext(@NonNull D enclosingScope) { - super(enclosingScope); +public abstract class IndirectNestedSpanContext, + L> + extends BaseNestedSpanContext + implements IWithTypedEnclosingScope +{ + protected IndirectNestedSpanContext(@NonNull D enclosingScope) { + super(enclosingScope.getRootInstrumentationScope(), enclosingScope); } public abstract L getLogicalEnclosingScope(); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index a15c61b8b..e6a0fb817 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -3,7 +3,6 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; @@ -26,7 +25,7 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; -public class RootOtelContext implements IRootOtelContext { +public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @@ -93,7 +92,7 @@ public String getScopeName() { } @Override - public RootOtelContext getEnclosingScope() { + public RootOtelContext getEnclosingScope() { return null; } @@ -106,29 +105,29 @@ public MeterProvider getMeterProvider() { return getOpenTelemetry().getMeterProvider(); } - @Override - @NonNull - public S getRootInstrumentationScope() { - return (S) this; - } // CRTP so that callers can get more specific - @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; // nothing more to do } - private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan) { - return Optional.ofNullable(parentSpan).map(p -> builder.setParent(Context.current().with(p))) - .orElseGet(builder::setNoParent) + private static SpanBuilder addLinkedToBuilder(Span linkedSpanContext, SpanBuilder spanBuilder) { + return Optional.ofNullable(linkedSpanContext) + .map(Span::getSpanContext).map(spanBuilder::addLink).orElse(spanBuilder); + } + + private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan, + Span linkedSpanContext) { + return addLinkedToBuilder(linkedSpanContext, Optional.ofNullable(parentSpan) + .map(p -> builder.setParent(Context.current().with(p))) + .orElseGet(builder::setNoParent)) .startSpan().setAllAttributes(attrs); } @Override - public - Span buildSpan(IInstrumentationAttributes enclosingScope, - String scopeName, String spanName, AttributesBuilder attributesBuilder) { + public Span buildSpan(IInstrumentationAttributes enclosingScope, + String scopeName, String spanName, Span linkedSpan, 
AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); - return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan); + return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan, linkedSpan); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index a7f9c4c69..845b13f40 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -2,13 +2,10 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IConnectionContext extends IScopedInstrumentationAttributes { - String CHANNEL_SCOPE = "Channel"; - String SCOPE_NAME = CHANNEL_SCOPE; +public interface IConnectionContext extends IScopedInstrumentationAttributes { static final AttributeKey CONNECTION_ID_ATTR = AttributeKey.stringKey("connectionId"); static final AttributeKey NODE_ID_ATTR = AttributeKey.stringKey("nodeId"); @@ -16,7 +13,7 @@ public interface IConnectionContext extends IS String getNodeId(); @Override - default IInstrumentationAttributes getEnclosingScope() { return null; } + default IInstrumentationAttributes getEnclosingScope() { return null; } @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index 871e88ed0..2a4f3c495 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -6,7 +6,7 @@ import org.opensearch.migrations.tracing.IRootOtelContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { +public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { static final AttributeKey SOURCE_REQUEST_INDEX_KEY = AttributeKey.longKey("sourceRequestIndex"); long getSourceRequestIndex(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index 0b9813975..8214ec3d9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -5,26 +5,25 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import java.time.Instant; import java.util.List; public interface AccumulationCallbacks { void onRequestReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request); void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrpp); void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx); + IReplayContexts.IChannelKeyContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 2cb411ed1..a8a11ccab 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -105,7 +105,7 @@ public void onExpireAccumulation(String partitionId, Accumulation accumulation) @AllArgsConstructor private static class SpanWrappingAccumulationCallbacks { private final AccumulationCallbacks underlying; - public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, + public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { requestCtx.close(); underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), @@ -129,7 +129,7 @@ public void onConnectionClose(@NonNull Accumulation accum, } public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.ITrafficStreamsLifecycleContext tsCtx, + IReplayContexts.ITrafficStreamsLifecycleContext tsCtx, @NonNull List trafficStreamKeysBeingHeld) { underlying.onTrafficStreamsExpired(status, tsCtx.getLogicalEnclosingScope(), trafficStreamKeysBeingHeld); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 4a44f19be..303d53efc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -34,16 +34,15 @@ public enum ReconstructionStatus { ReconstructionStatus completionStatus; // switch between 
RequestAccumulation/ResponseAccumulation objects when we're parsing, // or just leave this null, in which case, the context from the trafficStreamKey should be used - private IScopedInstrumentationAttributes requestOrResponseAccumulationContext; + private IScopedInstrumentationAttributes requestOrResponseAccumulationContext; public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStreamKey, Instant sourceTimestamp, int startingSourceRequestIndex, int indexOfCurrentRequest) { this.firstTrafficStreamKeyForRequest = startingAtTrafficStreamKey; var requestKey = new UniqueReplayerRequestKey(startingAtTrafficStreamKey, startingSourceRequestIndex, indexOfCurrentRequest); - var httpTransactionContext = new ReplayContexts.HttpTransactionContext( - startingAtTrafficStreamKey.getTrafficStreamsContext(), - requestKey, sourceTimestamp); + var httpTransactionContext = startingAtTrafficStreamKey.getTrafficStreamsContext() + .createHttpTransactionContext(requestKey, sourceTimestamp); requestOrResponseAccumulationContext = new ReplayContexts.RequestAccumulationContext(httpTransactionContext); } @@ -51,7 +50,7 @@ public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStr return firstTrafficStreamKeyForRequest; } - public IReplayContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { + public IReplayContexts.IReplayerHttpTransactionContext getHttpTransactionContext() { var looseCtx = requestOrResponseAccumulationContext; // the req/response ctx types in the assert below will always implement this with the // IReplayerHttpTransactionContext parameter, but this seems clearer @@ -59,24 +58,24 @@ public IReplayContexts.IReplayerHttpTransactionContext get assert looseCtx instanceof IWithTypedEnclosingScope; assert looseCtx instanceof IReplayContexts.IRequestAccumulationContext || looseCtx instanceof IReplayContexts.IResponseAccumulationContext; - return ((IWithTypedEnclosingScope>) looseCtx) + return ((IWithTypedEnclosingScope) looseCtx) .getLogicalEnclosingScope(); } - public @NonNull IReplayContexts.IRequestAccumulationContext getRequestContext() { - return (IReplayContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; + public @NonNull IReplayContexts.IRequestAccumulationContext getRequestContext() { + return (IReplayContexts.IRequestAccumulationContext) requestOrResponseAccumulationContext; } - public @NonNull IReplayContexts.IResponseAccumulationContext getResponseContext() { - return (IReplayContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; + public @NonNull IReplayContexts.IResponseAccumulationContext getResponseContext() { + return (IReplayContexts.IResponseAccumulationContext) requestOrResponseAccumulationContext; } public void rotateRequestGatheringToResponse() { var looseCtx = requestOrResponseAccumulationContext; assert looseCtx instanceof IReplayContexts.IRequestAccumulationContext; - requestOrResponseAccumulationContext = new ReplayContexts.ResponseAccumulationContext( - getRequestContext().getLogicalEnclosingScope()); + requestOrResponseAccumulationContext = + getRequestContext().getLogicalEnclosingScope().createAccumulatorContext(); } public void addRequestData(Instant packetTimeStamp, byte[] data) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index ab8338247..d31a90011 100644 --- 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -44,7 +44,7 @@ public RequestSenderOrchestrator(ClientConnectionPool clientConnectionPool) { new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"waiting for final signal to confirm processing work has finished"); log.atDebug().setMessage(()->"Scheduling work for "+ctx.getConnectionId()+" at time "+timestamp).log(); - var scheduledContext = new ReplayContexts.ScheduledContext(ctx, timestamp); + var scheduledContext = ctx.createScheduledContext(timestamp); // this method doesn't use the scheduling that scheduleRequest and scheduleClose use because // doing work associated with a connection is considered to be preprocessing work independent // of the underlying network connection itself, so it's fair to be able to do this without @@ -136,7 +136,7 @@ public StringTrackableCompletableFuture scheduleClose(IReplayContexts.ICha replaySession.scheduleSequencer).log(); replaySession.scheduleSequencer.add(channelInteractionNumber, () -> successFn.accept(channelFutureAndRequestSchedule), - x -> x.run()); + Runnable::run); if (replaySession.scheduleSequencer.hasPending()) { log.atDebug().setMessage(()->"Sequencer for "+ctx+ " = "+replaySession.scheduleSequencer).log(); @@ -218,7 +218,7 @@ private void scheduleSendOnConnectionReplaySession(IReplayContexts.IReplayerHttp packetReceiverRef), eventLoop, packets.iterator(), start, interval, new AtomicInteger(), responseFuture); }; - var scheduledContext = new ReplayContexts.ScheduledContext(ctx, start); + var scheduledContext = ctx.createScheduledContext(start); scheduleOnConnectionReplaySession(ctx.getLogicalEnclosingScope(), ctx.getReplayerRequestKey().getSourceRequestIndex(), channelFutureAndRequestSchedule, responseFuture, start, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java index ea8a5f928..ede09d74d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java @@ -4,6 +4,7 @@ import org.opensearch.migrations.replay.kafka.KafkaBehavioralPolicy; import org.opensearch.migrations.replay.kafka.KafkaTrafficCaptureSource; import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; @@ -21,13 +22,13 @@ public class TrafficCaptureSourceFactory { private TrafficCaptureSourceFactory() {} public static BlockingTrafficSource - createTrafficCaptureSource(IInstrumentationAttributes ctx, + createTrafficCaptureSource(RootReplayerContext ctx, TrafficReplayer.Parameters appParams, Duration bufferTimeWindow) throws IOException { return new BlockingTrafficSource(createUnbufferedTrafficCaptureSource(ctx, appParams), bufferTimeWindow); } public static ISimpleTrafficCaptureSource - createUnbufferedTrafficCaptureSource(IInstrumentationAttributes ctx, + 
createUnbufferedTrafficCaptureSource(RootReplayerContext ctx, TrafficReplayer.Parameters appParams) throws IOException { boolean isKafkaActive = TrafficReplayer.validateRequiredKafkaParams(appParams.kafkaTrafficBrokers, appParams.kafkaTrafficTopic, appParams.kafkaTrafficGroupId); boolean isInputFileActive = appParams.inputFilename != null; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index afed17e32..956c3ba36 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -206,7 +206,7 @@ public static boolean validateRequiredKafkaParams(String brokers, String topic, return true; } - static class Parameters { + public static class Parameters { @Parameter(required = true, arity = 1, description = "URI of the target cluster/domain") @@ -604,7 +604,7 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { @Override public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); @@ -623,7 +623,7 @@ public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, @Override public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrPair) { log.atInfo().setMessage(()->"Done receiving captured stream for " + requestKey + ":" + rrPair.requestData).log(); @@ -639,7 +639,7 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, } } - Void handleCompletedTransaction(IInstrumentationAttributes context, + Void handleCompletedTransaction(IInstrumentationAttributes context, @NonNull UniqueReplayerRequestKey requestKey, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { @@ -652,7 +652,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes try (var tupleHandlingContext = httpContext.createTupleContext()) { packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); } - commitTrafficStreams(context, rrPair.trafficStreamKeysBeingHeld, rrPair.completionStatus); + commitTrafficStreams(rrPair.completionStatus, context, rrPair.trafficStreamKeysBeingHeld); return null; } else { log.atError().setCause(t).setMessage(()->"Throwable passed to handle() for " + requestKey + @@ -686,26 +686,27 @@ Void handleCompletedTransaction(IInstrumentationAttributes @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { - commitTrafficStreams(ctx, trafficStreamKeysBeingHeld, status); + commitTrafficStreams(status, ctx, trafficStreamKeysBeingHeld); } @SneakyThrows - private void commitTrafficStreams(IInstrumentationAttributes context, - List trafficStreamKeysBeingHeld, - RequestResponsePacketPair.ReconstructionStatus status) { - commitTrafficStreams(context, trafficStreamKeysBeingHeld, - status != 
RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY); + private void commitTrafficStreams(RequestResponsePacketPair.ReconstructionStatus status, + IReplayContexts.IChannelKeyContext context, + List trafficStreamKeysBeingHeld) { + commitTrafficStreams(status != RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY, + context, trafficStreamKeysBeingHeld); } @SneakyThrows - private void commitTrafficStreams(IInstrumentationAttributes context, - List trafficStreamKeysBeingHeld, boolean shouldCommit) { + private void commitTrafficStreams(boolean shouldCommit, + IReplayContexts.IChannelKeyContext context, + List trafficStreamKeysBeingHeld) { if (shouldCommit && trafficStreamKeysBeingHeld != null) { for (var tsk : trafficStreamKeysBeingHeld) { tsk.getTrafficStreamsContext().close(); - trafficCaptureSource.commitTrafficStream(context, tsk); + trafficCaptureSource.commitTrafficStream(()->context, tsk); } } } @@ -713,19 +714,19 @@ private void commitTrafficStreams(IInstrumentationAttributes ctx, + IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant timestamp, @NonNull List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); cf.map(f->f.whenComplete((v,t)->{ - commitTrafficStreams(ctx, trafficStreamKeysBeingHeld, status); + commitTrafficStreams(status, ctx, trafficStreamKeysBeingHeld); }), ()->"closing the channel in the ReplayEngine"); } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IReplayContexts.IChannelKeyContext ctx) { - commitTrafficStreams(ctx, List.of(tsk), true); + commitTrafficStreams(true, ctx, List.of(tsk)); } private TransformedTargetRequestAndResponse @@ -987,7 +988,8 @@ public void pullCaptureFromSourceToAccumulator( if (stopReadingRef.get()) { break; } - this.nextChunkFutureRef.set(trafficChunkStream.readNextTrafficStreamChunk(topLevelContext)); + this.nextChunkFutureRef.set(trafficChunkStream + .readNextTrafficStreamChunk(topLevelContext::createReadChunkContext)); List trafficStreams = null; try { trafficStreams = this.nextChunkFutureRef.get().get(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index cbf728ba2..c312333c8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -59,7 +59,7 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer activeChannelFuture; private final Channel channel; AggregatedRawResponse.Builder responseBuilder; - IWithTypedEnclosingScope> currentRequestContextUnion; + IWithTypedEnclosingScope currentRequestContextUnion; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, ReplayContexts.HttpTransactionContext httpTransactionContext) { @@ -68,8 +68,8 @@ public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri } public NettyPacketToHttpConsumer(ChannelFuture clientConnection, - IReplayContexts.IReplayerHttpTransactionContext ctx) { - var parentContext = new ReplayContexts.TargetRequestContext(ctx); + 
IReplayContexts.IReplayerHttpTransactionContext ctx) { + var parentContext = ctx.createTargetRequestContext(); this.setCurrentRequestContext(new ReplayContexts.RequestSendingContext(parentContext)); responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = @@ -93,24 +93,24 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, }); } - private > & - IScopedInstrumentationAttributes> + private & + IScopedInstrumentationAttributes> void setCurrentRequestContext(T requestSendingContext) { currentRequestContextUnion = requestSendingContext; } - private IScopedInstrumentationAttributes getCurrentRequestSpan() { - return (IScopedInstrumentationAttributes) currentRequestContextUnion; + private IScopedInstrumentationAttributes getCurrentRequestSpan() { + return (IScopedInstrumentationAttributes) currentRequestContextUnion; } - public IReplayContexts.ITargetRequestContext getParentContext() { + public IReplayContexts.ITargetRequestContext getParentContext() { return currentRequestContextUnion.getLogicalEnclosingScope(); } public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup, SslContext sslContext, URI serverUri, - IReplayContexts.IChannelKeyContext channelKeyContext) { + IReplayContexts.IChannelKeyContext channelKeyContext) { String host = serverUri.getHost(); int port = serverUri.getPort(); log.atTrace().setMessage(()->"Active - setting up backend connection to " + host + ":" + port).log(); @@ -179,7 +179,7 @@ private void activateChannelForThisConsumer() { pipeline.addFirst(new ReadMeteringingHandler(size->{ if (!(this.currentRequestContextUnion instanceof IReplayContexts.IRequestSendingContext)) { this.getCurrentRequestSpan().close(); - this.setCurrentRequestContext(new ReplayContexts.ReceivingHttpResponseContext(getParentContext())); + this.setCurrentRequestContext(getParentContext().createHttpReceivingContext()); } getParentContext().onBytesReceived(size); })); @@ -240,7 +240,7 @@ public DiagnosticTrackableCompletableFuture consumeBytes(ByteBuf pa return activeChannelFuture; } - private IReplayContexts.IReplayerHttpTransactionContext httpContext() { + private IReplayContexts.IReplayerHttpTransactionContext httpContext() { return getParentContext().getLogicalEnclosingScope(); } @@ -297,7 +297,7 @@ private IReplayContexts.IReplayerHttpTransactionContext htt finalizeRequest() { var ff = activeChannelFuture.getDeferredFutureThroughHandle((v,t)-> { this.getCurrentRequestSpan().close(); - this.setCurrentRequestContext(new ReplayContexts.WaitingForHttpResponseContext(getParentContext())); + this.setCurrentRequestContext(getParentContext().createWaitingForResponseContext()); var future = new CompletableFuture(); var rval = new DiagnosticTrackableCompletableFuture(future, diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 5ede26f06..8fe3b298a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -69,7 +69,7 @@ public HttpJsonTransformingConsumer(IJsonTransformer transformer, IAuthTransformerFactory authTransformerFactory, IPacketFinalizingConsumer 
transformedPacketReceiver, IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { - transformationContext = new ReplayContexts.RequestTransformationContext(httpTransactionContext); + transformationContext = httpTransactionContext.createTransformationContext(); chunkSizes = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS); chunkSizes.add(new ArrayList<>(EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS)); chunks = new ArrayList<>(HTTP_MESSAGE_NUM_SEGMENTS + EXPECTED_PACKET_COUNT_GUESS_FOR_HEADERS); @@ -163,7 +163,7 @@ public DiagnosticTrackableCompletableFuturef.thenApply(r->reason == null ? - new TransformedOutputAndResult(r, HttpRequestTransformationStatus.SKIPPED, null) : - new TransformedOutputAndResult(r, HttpRequestTransformationStatus.ERROR, reason)) - .whenComplete((v,t)->transformationContext.close()), + return finalizedFuture.map(f->f.thenApply(r -> reason == null ? + new TransformedOutputAndResult<>(r, HttpRequestTransformationStatus.SKIPPED, null) : + new TransformedOutputAndResult<>(r, HttpRequestTransformationStatus.ERROR, reason) + ) + .whenComplete((v,t)->{ + transformationContext.onTransformSkip(); + transformationContext.close(); + }), ()->"HttpJsonTransformingConsumer.redriveWithoutTransformation().map()"); } } \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 3a8e7e845..142739f36 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -29,13 +29,13 @@ public class NettyDecodedHttpRequestPreliminaryConvertHandler extends Channel final IJsonTransformer transformer; final List> chunkSizes; final String diagnosticLabel; - private final IReplayContexts.IRequestTransformationContext httpTransactionContext; + private final IReplayContexts.IRequestTransformationContext httpTransactionContext; static final MetricsLogger metricsLogger = new MetricsLogger("NettyDecodedHttpRequestPreliminaryConvertHandler"); public NettyDecodedHttpRequestPreliminaryConvertHandler(IJsonTransformer transformer, List> chunkSizes, RequestPipelineOrchestrator requestPipelineOrchestrator, - IReplayContexts.IRequestTransformationContext httpTransactionContext) { + IReplayContexts.IRequestTransformationContext httpTransactionContext) { this.transformer = transformer; this.chunkSizes = chunkSizes; this.requestPipelineOrchestrator = requestPipelineOrchestrator; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 131608c70..c2d0149ac 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -45,14 +45,14 @@ public class RequestPipelineOrchestrator { public static final String HTTP_REQUEST_DECODER_NAME = 
"HTTP_REQUEST_DECODER"; private final List> chunkSizes; final IPacketFinalizingConsumer packetReceiver; - private final IReplayContexts.IRequestTransformationContext httpTransactionContext; + private final IReplayContexts.IRequestTransformationContext httpTransactionContext; @Getter final IAuthTransformerFactory authTransfomerFactory; public RequestPipelineOrchestrator(List> chunkSizes, IPacketFinalizingConsumer packetReceiver, IAuthTransformerFactory incomingAuthTransformerFactory, - IReplayContexts.IRequestTransformationContext httpTransactionContext) { + IReplayContexts.IRequestTransformationContext httpTransactionContext) { this.chunkSizes = chunkSizes; this.packetReceiver = packetReceiver; this.authTransfomerFactory = incomingAuthTransformerFactory != null ? incomingAuthTransformerFactory : diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java index 1a068d108..4648bceb9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java @@ -8,5 +8,5 @@ public interface ISourceTrafficChannelKey { String getNodeId(); String getConnectionId(); - @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); + @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 04ba81123..a8996ccdb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -14,6 +14,7 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -35,6 +36,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Supplier; import java.util.stream.Collectors; /** @@ -106,7 +108,7 @@ private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { } var kafkaCtx = (ReplayContexts.KafkaRecordContext) looseParentScope; kafkaCtx.close(); - channelContextManager.releaseContextFor((ReplayContexts.ChannelKeyContext) kafkaCtx.getImmediateEnclosingScope()); + channelContextManager.releaseContextFor(kafkaCtx.getImmediateEnclosingScope()); } public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull IInstrumentationAttributes globalContext, @@ -169,7 +171,7 @@ public static Properties buildKafkaProperties(@NonNull String brokers, @Override @SneakyThrows - public void 
touch(IInstrumentationAttributes context) { + public void touch(ITrafficSourceContexts.IReadChunkContext context) { CompletableFuture.runAsync(()->trackingKafkaConsumer.touch(context), kafkaExecutor).get(); } @@ -186,15 +188,16 @@ public Optional getNextRequiredTouch() { @Override @SuppressWarnings("unchecked") public CompletableFuture> - readNextTrafficStreamChunk(IInstrumentationAttributes context) { + readNextTrafficStreamChunk(Supplier contextSupplier) { log.atTrace().setMessage("readNextTrafficStreamChunk()").log(); return CompletableFuture.supplyAsync(() -> { log.atTrace().setMessage("async...readNextTrafficStreamChunk()").log(); - return readNextTrafficStreamSynchronously(context); + return readNextTrafficStreamSynchronously(contextSupplier); }, kafkaExecutor); } - public List readNextTrafficStreamSynchronously(IInstrumentationAttributes context) { + public List + readNextTrafficStreamSynchronously(ITrafficSourceContexts.IReadChunkContext context) { log.atTrace().setMessage("readNextTrafficStreamSynchronously()").log(); try { return trackingKafkaConsumer.getNextBatchOfRecords(context, (offsetData,kafkaRecord) -> { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java index af62e08cc..f1fae148a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrackingKafkaConsumer.java @@ -11,9 +11,11 @@ import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.tracing.KafkaConsumerContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.slf4j.event.Level; import java.time.Clock; @@ -29,6 +31,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; +import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -79,7 +82,7 @@ public int hashCode() { */ public static final int POLL_TIMEOUT_KEEP_ALIVE_DIVISOR = 4; - @NonNull private final IInstrumentationAttributes globalContext; + @NonNull private final RootReplayerContext globalContext; private final Consumer kafkaConsumer; final String topic; @@ -102,7 +105,7 @@ public int hashCode() { private final AtomicInteger kafkaRecordsLeftToCommitEventually; private final AtomicBoolean kafkaRecordsReadyToCommit; - public TrackingKafkaConsumer(@NonNull IInstrumentationAttributes globalContext, + public TrackingKafkaConsumer(@NonNull RootReplayerContext globalContext, Consumer kafkaConsumer, String topic, Duration keepAliveInterval, Clock c, java.util.function.Consumer onCommitKeyCallback) { @@ -125,7 +128,7 @@ public TrackingKafkaConsumer(@NonNull IInstrumentationAttributes globalContext, public void onPartitionsRevoked(Collection partitions) { new 
KafkaConsumerContexts.AsyncListeningContext(globalContext).onPartitionsRevoked(partitions); synchronized (commitDataLock) { - safeCommit(globalContext); + safeCommit(globalContext::createCommitContext); partitions.forEach(p -> { var tp = new TopicPartition(topic, p.partition()); nextSetOfCommitsMap.remove(tp); @@ -166,11 +169,11 @@ public Optional getNextRequiredTouch() { return r; } - public void touch(IInstrumentationAttributes context) { - try (var touchCtx = new KafkaConsumerContexts.TouchScopeContext(context)) { + public void touch(ITrafficSourceContexts.IBackPressureBlockContext context) { + try (var touchCtx = context.createNewTouchContext()) { log.trace("touch() called."); pause(); - try (var pollCtx = new KafkaConsumerContexts.PollScopeContext(touchCtx)) { + try (var pollCtx = touchCtx.createNewPollContext()) { var records = kafkaConsumer.poll(Duration.ZERO); if (!records.isEmpty()) { throw new IllegalStateException("Expected no entries once the consumer was paused. " + @@ -184,7 +187,7 @@ public void touch(IInstrumentationAttributes context) { } finally { resume(); } - safeCommit(context); + safeCommit(()->context.createCommitContext()); lastTouchTimeRef.set(clock.instant()); } } @@ -229,11 +232,11 @@ private Collection getActivePartitions() { } public Stream - getNextBatchOfRecords(IInstrumentationAttributes context, + getNextBatchOfRecords(ITrafficSourceContexts.IReadChunkContext context, BiFunction, T> builder) { - safeCommit(context); + safeCommit(()->context.createCommitContext()); var records = safePollWithSwallowedRuntimeExceptions(context); - safeCommit(context); + safeCommit(()->context.createCommitContext()); return applyBuilder(builder, records); } @@ -252,11 +255,11 @@ private Stream applyBuilder(BiFunction - safePollWithSwallowedRuntimeExceptions(IInstrumentationAttributes context) { + safePollWithSwallowedRuntimeExceptions(ITrafficSourceContexts.IReadChunkContext context) { try { lastTouchTimeRef.set(clock.instant()); ConsumerRecords records; - try (var pollContext = new KafkaConsumerContexts.PollScopeContext(context)) { + try (var pollContext = context.createPollContext()) { records = kafkaConsumer.poll(keepAliveInterval.dividedBy(POLL_TIMEOUT_KEEP_ALIVE_DIVISOR)); } log.atLevel(records.isEmpty()? 
Level.TRACE:Level.INFO) @@ -316,16 +319,15 @@ private void addKeyContextForEventualCommit(ITrafficStreamKey streamKey, KafkaCo .add(new OrderedKeyHolder(kafkaTsk.getOffset(), streamKey)); } - private void safeCommit(IInstrumentationAttributes incomingContext) { + private void safeCommit(Supplier commitContextSupplier) { HashMap nextCommitsMapCopy; - KafkaConsumerContexts.CommitScopeContext context = null; + IKafkaConsumerContexts.ICommitScopeContext context = null; synchronized (commitDataLock) { if (nextSetOfCommitsMap.isEmpty()) { return; } - context = new KafkaConsumerContexts.CommitScopeContext(incomingContext); - nextCommitsMapCopy = new HashMap<>(); - nextCommitsMapCopy.putAll(nextSetOfCommitsMap); + context = commitContextSupplier.get(); + nextCommitsMapCopy = new HashMap<>(nextSetOfCommitsMap); } try { safeCommitStatic(context, kafkaConsumer, nextCommitsMapCopy); @@ -363,12 +365,12 @@ private void safeCommit(IInstrumentationAttributes incomingContext) { } } - private static void safeCommitStatic(KafkaConsumerContexts.CommitScopeContext context, + private static void safeCommitStatic(IKafkaConsumerContexts.ICommitScopeContext context, Consumer kafkaConsumer, HashMap nextCommitsMap) { assert !nextCommitsMap.isEmpty(); log.atDebug().setMessage(() -> "Committing " + nextCommitsMap).log(); - try (var kafkaContext = new KafkaConsumerContexts.KafkaCommitScopeContext(context)) { + try (var kafkaContext = context.createNewKafkaCommitContext()) { kafkaConsumer.commitSync(nextCommitsMap); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java index 4d9009015..f6c79bee3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/TrafficStreamKeyWithKafkaRecordId.java @@ -5,7 +5,6 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.util.StringJoiner; @@ -31,7 +30,7 @@ class TrafficStreamKeyWithKafkaRecordId extends PojoTrafficStreamKeyAndContext i this.partition = partition; this.offset = offset; var kafkaContext = contextFactory.apply(this); - this.setTrafficStreamsContext(new ReplayContexts.TrafficStreamsLifecycleContext(kafkaContext, this)); + this.setTrafficStreamsContext(kafkaContext.createTrafficLifecyleContext(this)); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index 6ab207904..458f52155 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -8,9 +8,9 @@ import java.util.function.Function; public class ChannelContextManager implements Function { - private final IInstrumentationAttributes globalContext; + private 
final RootReplayerContext globalContext; - public ChannelContextManager(IInstrumentationAttributes globalContext) { + public ChannelContextManager(RootReplayerContext globalContext) { this.globalContext = globalContext; } @@ -47,7 +47,7 @@ public ReplayContexts.ChannelKeyContext apply(ITrafficStreamKey tsk) { public ReplayContexts.ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), - k-> new RefCountedContext(new ReplayContexts.ChannelKeyContext(globalContext, tsk))).retain(); + k-> new RefCountedContext(globalContext.createChannelContext(tsk))).retain(); } public ReplayContexts.ChannelKeyContext releaseContextFor(ReplayContexts.ChannelKeyContext ctx) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index f5632a626..de4684370 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -26,20 +26,22 @@ private MetricNames() {} public static final String ACTIVE_PARTITIONS_ASSIGNED_COUNT = "numPartitionsAssigned"; } - interface IAsyncListeningContext extends IInstrumentationAttributes { + interface IAsyncListeningContext extends IInstrumentationAttributes { String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; @Override default String getScopeName() { return SCOPE_NAME; } } - interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { + interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; @Override default String getScopeName() { return SCOPE_NAME; } } - interface ITouchScopeContext extends IKafkaConsumerScope { + interface ITouchScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.TOUCH; @Override default String getActivityName() { return ACTIVITY_NAME; } + + IPollScopeContext createNewPollContext(); } - interface IPollScopeContext extends IKafkaConsumerScope { + interface IPollScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.KAFKA_POLL; @Override default String getActivityName() { return ACTIVITY_NAME; } @@ -48,16 +50,18 @@ interface IPollScopeContext extends IKafkaCons /** * Context for the KafkaConsumer's bookkeeping around and including the commit service call */ - interface ICommitScopeContext extends IKafkaConsumerScope { + interface ICommitScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.COMMIT; @Override default String getActivityName() { return ACTIVITY_NAME; } + + IKafkaCommitScopeContext createNewKafkaCommitContext(); } /** * Context for ONLY the service call to Kafka to perform the commit. 
*/ - interface IKafkaCommitScopeContext extends IKafkaConsumerScope{ + interface IKafkaCommitScopeContext extends IKafkaConsumerScope{ String ACTIVITY_NAME = ActivityNames.KAFKA_COMMIT; @Override default String getActivityName() { return ACTIVITY_NAME; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index b0b593786..fca328eed 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -2,14 +2,9 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.LongUpDownCounter; -import io.opentelemetry.api.metrics.MeterProvider; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; @@ -76,7 +71,7 @@ private MetricNames() {} public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget"; } - public interface IAccumulationScope extends IScopedInstrumentationAttributes { + public interface IAccumulationScope extends IScopedInstrumentationAttributes { String SCOPE_NAME2 = "Replay"; @Override @@ -85,21 +80,11 @@ default String getScopeName() { } } - public interface IChannelKeyContext - extends IAccumulationScope, - IConnectionContext { + public interface IChannelKeyContext + extends IAccumulationScope, + IConnectionContext { String ACTIVITY_NAME = ActivityNames.CHANNEL; - class MetricInstruments extends CommonScopedMetricInstruments { - final LongUpDownCounter activeChannelCounter; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); - activeChannelCounter = meter - .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); - } - } - @Override default String getActivityName() { return ACTIVITY_NAME;} // do not add this as a property @@ -119,43 +104,37 @@ default String getNodeId() { void onTargetConnectionClosed(); } - public interface IKafkaRecordContext - extends IAccumulationScope, - IWithTypedEnclosingScope> + public interface IKafkaRecordContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.RECORD_LIFETIME; - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } - @Override default String getActivityName() { return ACTIVITY_NAME;} static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); String getRecordId(); + @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { return IAccumulationScope.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); } 
+ + ITrafficStreamsLifecycleContext createTrafficLifecyleContext(ITrafficStreamKey tsk); } - public interface ITrafficStreamsLifecycleContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface ITrafficStreamsLifecycleContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TRAFFIC_STREAM_LIFETIME; - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } + ReplayContexts.HttpTransactionContext createHttpTransactionContext(UniqueReplayerRequestKey requestKey, + Instant sourceTimestamp); @Override default String getActivityName() { return ACTIVITY_NAME;} ITrafficStreamKey getTrafficStreamKey(); - IChannelKeyContext getChannelKeyContext(); + IChannelKeyContext getChannelKeyContext(); default String getConnectionId() { return getChannelKey().getConnectionId(); } @@ -164,25 +143,19 @@ default ISourceTrafficChannelKey getChannelKey() { } } - public interface IReplayerHttpTransactionContext - extends IHttpTransactionContext, - IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IReplayerHttpTransactionContext + extends IHttpTransactionContext, + IAccumulationScope, + IWithTypedEnclosingScope { AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); ITupleHandlingContext createTupleContext(); - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } - String ACTIVITY_NAME = ActivityNames.HTTP_TRANSACTION; @Override default String getActivityName() { return ACTIVITY_NAME;} UniqueReplayerRequestKey getReplayerRequestKey(); - IChannelKeyContext getChannelKeyContext(); + IChannelKeyContext getChannelKeyContext(); Instant getTimeOfOriginalRequest(); default String getConnectionId() { @@ -205,49 +178,39 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return IHttpTransactionContext.super.fillAttributes( builder.put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex())); } + + ReplayContexts.RequestTransformationContext createTransformationContext(); + + IScopedInstrumentationAttributes createAccumulatorContext(); + + ReplayContexts.TargetRequestContext createTargetRequestContext(); + + IScheduledContext createScheduledContext(Instant timestamp); } - public interface IRequestAccumulationContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IRequestAccumulationContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.ACCUMULATING_REQUEST; - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } - @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface IResponseAccumulationContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IResponseAccumulationContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.ACCUMULATING_RESPONSE; - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } - @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface 
IRequestTransformationContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IRequestTransformationContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TRANSFORMATION; - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } - @Override default String getActivityName() { return ACTIVITY_NAME;} void onHeaderParse(); @@ -270,56 +233,54 @@ public MetricInstruments(MeterProvider meterProvider) { void aggregateOutputChunk(int sizeInBytes); } - public interface IScheduledContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IScheduledContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.SCHEDULED; - class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - } - } - @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface ITargetRequestContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface ITargetRequestContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TARGET_TRANSACTION; @Override default String getActivityName() { return ACTIVITY_NAME;} void onBytesSent(int size); void onBytesReceived(int size); + + IReceivingHttpResponseContext createHttpReceivingContext(); + + IWaitingForHttpResponseContext createWaitingForResponseContext(); } - public interface IRequestSendingContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IRequestSendingContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.REQUEST_SENDING; @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface IWaitingForHttpResponseContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IWaitingForHttpResponseContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.WAITING_FOR_RESPONSE; @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface IReceivingHttpResponseContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface IReceivingHttpResponseContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.RECEIVING_RESPONSE; @Override default String getActivityName() { return ACTIVITY_NAME;} } - public interface ITupleHandlingContext - extends IAccumulationScope, - IWithTypedEnclosingScope> { + public interface ITupleHandlingContext + extends IAccumulationScope, + IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TUPLE_HANDLING; @Override default String getActivityName() { return ACTIVITY_NAME; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java index 93e0e0cc8..3698c65b9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java @@ -1,9 +1,11 @@ package 
org.opensearch.migrations.replay.tracing; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; -public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { +public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { - TrafficSourceContexts.ReadChunkContext createReadChunkContext(); + ITrafficSourceContexts.IReadChunkContext createReadChunkContext(); + IReplayContexts.IChannelKeyContext createChannelContext(ITrafficStreamKey tsk); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java index c3621740f..d6702ba62 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -1,6 +1,5 @@ package org.opensearch.migrations.replay.tracing; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface ITrafficSourceContexts { @@ -16,22 +15,34 @@ private ActivityNames() {} public static final String WAIT_FOR_NEXT_BACK_PRESSURE_CHECK = "waitForNextBackPressureCheck"; } - interface ITrafficSourceContext extends IScopedInstrumentationAttributes { + interface ITrafficSourceContext extends IScopedInstrumentationAttributes { String SCOPE_NAME = ScopeNames.TRAFFIC_SCOPE; @Override default String getScopeName() { return SCOPE_NAME; } } - interface IReadChunkContext extends ITrafficSourceContext { + interface IReadChunkContext extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.READ_NEXT_TRAFFIC_CHUNK; @Override default String getActivityName() { return ACTIVITY_NAME; } + + IBackPressureBlockContext createBackPressureContext(); + + IKafkaConsumerContexts.IPollScopeContext createPollContext(); + + IKafkaConsumerContexts.ICommitScopeContext createCommitContext(); } - interface IBackPressureBlockContext extends ITrafficSourceContext { + interface IBackPressureBlockContext extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.BACK_PRESSURE_BLOCK; @Override default String getActivityName() { return ACTIVITY_NAME; } + + IWaitForNextSignal createWaitForSignalContext(); + + IKafkaConsumerContexts.ITouchScopeContext createNewTouchContext(); + + IKafkaConsumerContexts.ICommitScopeContext createCommitContext(); } - interface IWaitForNextSignal extends ITrafficSourceContext { + interface IWaitForNextSignal extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.WAIT_FOR_NEXT_BACK_PRESSURE_CHECK; default String getActivityName() { return ACTIVITY_NAME; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 128b8f85a..1573b2c76 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -9,8 +9,10 @@ import lombok.Getter; import 
lombok.NonNull; import org.apache.kafka.common.TopicPartition; +import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; +import org.opensearch.migrations.tracing.IHasRootInstrumentationScope; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import java.util.Collection; @@ -21,7 +23,7 @@ private KafkaConsumerContexts() {} @AllArgsConstructor public static class AsyncListeningContext - implements IKafkaConsumerContexts.IAsyncListeningContext { + implements IKafkaConsumerContexts.IAsyncListeningContext { public static class MetricInstruments { public final LongCounter kafkaPartitionsRevokedCounter; public final LongCounter kafkaPartitionsAssignedCounter; @@ -39,15 +41,10 @@ public MetricInstruments(MeterProvider meterProvider) { @Getter @NonNull - private final IInstrumentationAttributes enclosingScope; - - @Override - public @NonNull RootReplayerContext getRootInstrumentationScope() { - return enclosingScope.getRootInstrumentationScope(); - } + public final RootReplayerContext enclosingScope; private @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().asyncListeningInstruments; + return enclosingScope.asyncListeningInstruments; } public void onPartitionsRevoked(Collection partitions) { @@ -66,15 +63,20 @@ private void onParitionsAssignedChanged(int delta) { } public static class TouchScopeContext - extends DirectNestedSpanContext> - implements IKafkaConsumerContexts.ITouchScopeContext + extends DirectNestedSpanContext + implements IKafkaConsumerContexts.ITouchScopeContext { + @Override + public IKafkaConsumerContexts.IPollScopeContext createNewPollContext() { + return new KafkaConsumerContexts.PollScopeContext(getRootInstrumentationScope(), this); + } + public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); } } - public TouchScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { + public TouchScopeContext(@NonNull TrafficSourceContexts.BackPressureBlockContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -85,8 +87,8 @@ public TouchScopeContext(@NonNull IInstrumentationAttributes> - implements IKafkaConsumerContexts.IPollScopeContext { + extends BaseNestedSpanContext + implements IKafkaConsumerContexts.IPollScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); @@ -98,16 +100,21 @@ public CommonScopedMetricInstruments getMetrics() { return getRootInstrumentationScope().pollInstruments; } - public PollScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { - super(enclosingScope); + public PollScopeContext(@NonNull RootReplayerContext rootScope, + @NonNull IInstrumentationAttributes enclosingScope) { + super(rootScope, enclosingScope); initializeSpan(); } - } public static class CommitScopeContext - extends DirectNestedSpanContext> - implements IKafkaConsumerContexts.ICommitScopeContext { + extends BaseNestedSpanContext + implements IKafkaConsumerContexts.ICommitScopeContext { + + @Override + public IKafkaConsumerContexts.IKafkaCommitScopeContext createNewKafkaCommitContext() { + return new KafkaConsumerContexts.KafkaCommitScopeContext(this); + } public static class MetricInstruments extends 
CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { @@ -120,16 +127,16 @@ public MetricInstruments getMetrics() { return getRootInstrumentationScope().commitInstruments; } - public CommitScopeContext(@NonNull IInstrumentationAttributes enclosingScope) { - super(enclosingScope); + public CommitScopeContext(@NonNull RootReplayerContext rootScope, + @NonNull IInstrumentationAttributes enclosingScope) { + super(rootScope, enclosingScope); initializeSpan(); } - } public static class KafkaCommitScopeContext - extends DirectNestedSpanContext - implements IKafkaConsumerContexts.IKafkaCommitScopeContext { + extends DirectNestedSpanContext + implements IKafkaConsumerContexts.IKafkaCommitScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 3f5b4c8a2..13ad84d6e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -9,10 +9,11 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.AbstractNestedSpanContext; +import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IndirectNestedSpanContext; import java.time.Duration; @@ -26,13 +27,15 @@ public class ReplayContexts { private ReplayContexts() {} public static class ChannelKeyContext - extends AbstractNestedSpanContext> - implements IReplayContexts.IChannelKeyContext { + extends BaseNestedSpanContext + implements IReplayContexts.IChannelKeyContext { @Getter final ISourceTrafficChannelKey channelKey; - public ChannelKeyContext(IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { - super(enclosingScope); + public ChannelKeyContext(RootReplayerContext rootScope, + IInstrumentationAttributes enclosingScope, + ISourceTrafficChannelKey channelKey) { + super(rootScope, enclosingScope); this.channelKey = channelKey; initializeSpan(); } @@ -67,13 +70,12 @@ public void onTargetConnectionClosed() { } public static class KafkaRecordContext - extends DirectNestedSpanContext> - implements IReplayContexts.IKafkaRecordContext { + extends DirectNestedSpanContext + implements IReplayContexts.IKafkaRecordContext { final String recordId; - public KafkaRecordContext(IReplayContexts.IChannelKeyContext enclosingScope, - String recordId, int recordSize) { + public KafkaRecordContext(ChannelKeyContext enclosingScope, String recordId, int recordSize) { super(enclosingScope); this.recordId = recordId; initializeSpan(); @@ -102,14 +104,20 @@ public MetricInstruments(MeterProvider meterProvider) { public String getRecordId() { return 
recordId; } + + @Override + public IReplayContexts.ITrafficStreamsLifecycleContext + createTrafficLifecyleContext(ITrafficStreamKey tsk) { + return new ReplayContexts.TrafficStreamsLifecycleContext(this, tsk); + } } public static class TrafficStreamsLifecycleContext - extends IndirectNestedSpanContext, IReplayContexts.IChannelKeyContext> - implements IReplayContexts.ITrafficStreamsLifecycleContext { + extends IndirectNestedSpanContext + implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; - public TrafficStreamsLifecycleContext(IReplayContexts.IKafkaRecordContext enclosingScope, + public TrafficStreamsLifecycleContext(KafkaRecordContext enclosingScope, ITrafficStreamKey trafficStreamKey) { super(enclosingScope); this.trafficStreamKey = trafficStreamKey; @@ -133,31 +141,39 @@ public MetricInstruments(MeterProvider meterProvider) { } @Override - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getLogicalEnclosingScope(); } + @Override + public HttpTransactionContext createHttpTransactionContext(UniqueReplayerRequestKey requestKey, + Instant sourceTimestamp) { + return new ReplayContexts.HttpTransactionContext(getRootInstrumentationScope(), + this, requestKey, sourceTimestamp); + } + @Override public ITrafficStreamKey getTrafficStreamKey() { return trafficStreamKey; } @Override - public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { + public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { return getImmediateEnclosingScope().getLogicalEnclosingScope(); } } public static class HttpTransactionContext - extends IndirectNestedSpanContext, IReplayContexts.IChannelKeyContext> - implements IReplayContexts.IReplayerHttpTransactionContext { + extends BaseNestedSpanContext + implements IReplayContexts.IReplayerHttpTransactionContext { final UniqueReplayerRequestKey replayerRequestKey; @Getter final Instant timeOfOriginalRequest; - public HttpTransactionContext(IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, + public HttpTransactionContext(RootReplayerContext rootScope, + IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, UniqueReplayerRequestKey replayerRequestKey, Instant timeOfOriginalRequest) { - super(enclosingScope); + super(rootScope, enclosingScope); this.replayerRequestKey = replayerRequestKey; this.timeOfOriginalRequest = timeOfOriginalRequest; initializeSpan(); @@ -178,10 +194,30 @@ public MetricInstruments(MeterProvider meterProvider) { return getRootInstrumentationScope().httpTransactionContext; } - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getLogicalEnclosingScope(); } + @Override + public RequestTransformationContext createTransformationContext() { + return new ReplayContexts.RequestTransformationContext(this); + } + + @Override + public IScopedInstrumentationAttributes createAccumulatorContext() { + return new ReplayContexts.ResponseAccumulationContext(this); + } + + @Override + public TargetRequestContext createTargetRequestContext() { + return new ReplayContexts.TargetRequestContext(this); + } + + @Override + public IReplayContexts.IScheduledContext createScheduledContext(Instant timestamp) { + return new ReplayContexts.ScheduledContext(this, timestamp); + } + @Override public UniqueReplayerRequestKey getReplayerRequestKey() { return replayerRequestKey; @@ -193,15 +229,15 @@ 
public String toString() { } @Override - public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { + public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { return getImmediateEnclosingScope().getLogicalEnclosingScope(); } } public static class RequestAccumulationContext - extends DirectNestedSpanContext> - implements IReplayContexts.IRequestAccumulationContext { - public RequestAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.IRequestAccumulationContext { + public RequestAccumulationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -218,9 +254,9 @@ public MetricInstruments(MeterProvider meterProvider) { } public static class ResponseAccumulationContext - extends DirectNestedSpanContext> - implements IReplayContexts.IResponseAccumulationContext { - public ResponseAccumulationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.IResponseAccumulationContext { + public ResponseAccumulationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -237,9 +273,9 @@ public MetricInstruments(MeterProvider meterProvider) { } public static class RequestTransformationContext - extends DirectNestedSpanContext> - implements IReplayContexts.IRequestTransformationContext { - public RequestTransformationContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.IRequestTransformationContext { + public RequestTransformationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -352,12 +388,11 @@ public MetricInstruments(MeterProvider meterProvider) { } public static class ScheduledContext - extends DirectNestedSpanContext> - implements IReplayContexts.IScheduledContext { + extends DirectNestedSpanContext + implements IReplayContexts.IScheduledContext { private final Instant scheduledFor; - public ScheduledContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope, - Instant scheduledFor) { + public ScheduledContext(HttpTransactionContext enclosingScope, Instant scheduledFor) { super(enclosingScope); this.scheduledFor = scheduledFor; initializeSpan(); @@ -384,9 +419,9 @@ public void sendMeterEventsForEnd() { } public static class TargetRequestContext - extends DirectNestedSpanContext> - implements IReplayContexts.ITargetRequestContext { - public TargetRequestContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.ITargetRequestContext { + public TargetRequestContext(HttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); meterHistogramMillis(getMetrics().sourceTargetGap, @@ -424,12 +459,22 @@ public void onBytesSent(int size) { public void onBytesReceived(int size) { meterIncrementEvent(getMetrics().bytesRead, size); } + + @Override + public IReplayContexts.IReceivingHttpResponseContext createHttpReceivingContext() { + return new ReplayContexts.ReceivingHttpResponseContext(this); + } + + @Override + public IReplayContexts.IWaitingForHttpResponseContext createWaitingForResponseContext() { + return new ReplayContexts.WaitingForHttpResponseContext(this); + } } public static class RequestSendingContext - extends DirectNestedSpanContext> - implements IReplayContexts.IRequestSendingContext { 
- public RequestSendingContext(IReplayContexts.ITargetRequestContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.IRequestSendingContext { + public RequestSendingContext(TargetRequestContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -446,9 +491,9 @@ public MetricInstruments(MeterProvider meterProvider) { } public static class WaitingForHttpResponseContext - extends DirectNestedSpanContext> - implements IReplayContexts.IWaitingForHttpResponseContext { - public WaitingForHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.IWaitingForHttpResponseContext { + public WaitingForHttpResponseContext(TargetRequestContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -466,9 +511,9 @@ public MetricInstruments(MeterProvider meterProvider) { } public static class ReceivingHttpResponseContext - extends DirectNestedSpanContext> - implements IReplayContexts.IReceivingHttpResponseContext { - public ReceivingHttpResponseContext(IReplayContexts.ITargetRequestContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.IReceivingHttpResponseContext { + public ReceivingHttpResponseContext(TargetRequestContext enclosingScope) { super(enclosingScope); initializeSpan(); } @@ -486,9 +531,9 @@ public MetricInstruments(MeterProvider meterProvider) { } public static class TupleHandlingContext - extends DirectNestedSpanContext> - implements IReplayContexts.ITupleHandlingContext { - public TupleHandlingContext(IReplayContexts.IReplayerHttpTransactionContext enclosingScope) { + extends DirectNestedSpanContext + implements IReplayContexts.ITupleHandlingContext { + public TupleHandlingContext(HttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index d0571aae1..5c6916047 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -5,6 +5,7 @@ import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.MeterProvider; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.tracing.RootOtelContext; @@ -12,7 +13,7 @@ import lombok.Getter; @Getter -public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { +public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { public final KafkaConsumerContexts.AsyncListeningContext.MetricInstruments asyncListeningInstruments; public final KafkaConsumerContexts.TouchScopeContext.MetricInstruments touchInstruments; public final KafkaConsumerContexts.PollScopeContext.MetricInstruments pollInstruments; @@ -73,6 +74,14 @@ public RootReplayerContext(OpenTelemetry sdk) { @Override public TrafficSourceContexts.ReadChunkContext createReadChunkContext() { - return new TrafficSourceContexts.ReadChunkContext(this); + return 
new TrafficSourceContexts.ReadChunkContext(this, this); + } + + public ReplayContexts.ChannelKeyContext createChannelContext(ITrafficStreamKey tsk) { + return new ReplayContexts.ChannelKeyContext(this, this, tsk); + } + + public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { + return new KafkaConsumerContexts.CommitScopeContext(this, this); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index 820e7b2ac..15e269c72 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.metrics.MeterProvider; import lombok.NonNull; +import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -11,9 +12,24 @@ public class TrafficSourceContexts { private TrafficSourceContexts() {} public static class ReadChunkContext - extends DirectNestedSpanContext> - implements ITrafficSourceContexts.IReadChunkContext + extends BaseNestedSpanContext + implements ITrafficSourceContexts.IReadChunkContext { + @Override + public ITrafficSourceContexts.IBackPressureBlockContext createBackPressureContext() { + return new TrafficSourceContexts.BackPressureBlockContext(getRootInstrumentationScope(), this); + } + + @Override + public IKafkaConsumerContexts.IPollScopeContext createPollContext() { + return new KafkaConsumerContexts.PollScopeContext(getRootInstrumentationScope(), this); + } + + @Override + public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { + return new KafkaConsumerContexts.CommitScopeContext(getRootInstrumentationScope(), this); + } + public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); @@ -23,17 +39,32 @@ public MetricInstruments(MeterProvider meterProvider) { return getRootInstrumentationScope().readChunkInstruments; } - public ReadChunkContext(IInstrumentationAttributes enclosingScope) { - super(enclosingScope); + public ReadChunkContext(RootReplayerContext rootScope, IInstrumentationAttributes enclosingScope) { + super(rootScope, enclosingScope); initializeSpan(); } + } public static class BackPressureBlockContext - extends DirectNestedSpanContext> - implements ITrafficSourceContexts.IBackPressureBlockContext + extends BaseNestedSpanContext + implements ITrafficSourceContexts.IBackPressureBlockContext { + @Override + public ITrafficSourceContexts.IWaitForNextSignal createWaitForSignalContext() { + return new TrafficSourceContexts.WaitForNextSignal(getRootInstrumentationScope(), this); + } + + @Override + public IKafkaConsumerContexts.ITouchScopeContext createNewTouchContext() { + return new KafkaConsumerContexts.TouchScopeContext(this); + } + + @Override + public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { + return new KafkaConsumerContexts.CommitScopeContext(getRootInstrumentationScope(), this); + } + public static class MetricInstruments extends CommonScopedMetricInstruments { public 
MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); @@ -43,16 +74,16 @@ public MetricInstruments(MeterProvider meterProvider) { return getRootInstrumentationScope().backPressureInstruments; } - public BackPressureBlockContext(@NonNull ITrafficSourceContexts.IReadChunkContext enclosingScope) { - super(enclosingScope); + public BackPressureBlockContext(@NonNull RootReplayerContext rootScope, + @NonNull ITrafficSourceContexts.IReadChunkContext enclosingScope) { + super(rootScope, enclosingScope); initializeSpan(); } } public static class WaitForNextSignal - extends DirectNestedSpanContext> - implements ITrafficSourceContexts.IWaitForNextSignal { + extends BaseNestedSpanContext + implements ITrafficSourceContexts.IWaitForNextSignal { public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(MeterProvider meterProvider) { super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); @@ -62,8 +93,9 @@ public MetricInstruments(MeterProvider meterProvider) { return getRootInstrumentationScope().waitForNextSignalInstruments; } - public WaitForNextSignal(@NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { - super(enclosingScope); + public WaitForNextSignal(@NonNull RootReplayerContext rootScope, + @NonNull ITrafficSourceContexts.IBackPressureBlockContext enclosingScope) { + super(rootScope, enclosingScope); initializeSpan(); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 635a381db..097d742b5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -5,12 +5,11 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import org.opensearch.migrations.replay.tracing.TrafficSourceContexts; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.slf4j.event.Level; @@ -24,6 +23,8 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; +import java.util.function.Supplier; /** * The BlockingTrafficSource class implements ITrafficCaptureSource and wraps another instance. @@ -86,19 +87,16 @@ public void stopReadsPast(Instant pointInTime) { /** * Reads the next chunk that is available before the current stopReading barrier. However, * that barrier isn't meant to be a tight barrier with immediate effect. 
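The TrafficSourceContexts and RootReplayerContext hunks above converge on one pattern: every context keeps an explicit reference to the root scope that owns its per-activity metric instruments, and each context mints its children through factory methods such as createBackPressureContext() and createWaitForSignalContext() rather than having callers construct child contexts directly. What follows is a minimal, self-contained sketch of that shape; all class names are hypothetical stand-ins and none of this code is part of the patch.

import java.util.concurrent.atomic.AtomicLong;

// Hypothetical stand-ins for RootReplayerContext / ReadChunkContext / BackPressureBlockContext.
class SketchRoot {
    final AtomicLong readChunkCount = new AtomicLong();      // stands in for readChunkInstruments
    final AtomicLong backPressureCount = new AtomicLong();   // stands in for backPressureInstruments

    SketchReadChunkContext createReadChunkContext() {        // analogous to createReadChunkContext() above
        return new SketchReadChunkContext(this);
    }
}

class SketchReadChunkContext implements AutoCloseable {
    private final SketchRoot root;

    SketchReadChunkContext(SketchRoot root) {
        this.root = root;
        root.readChunkCount.incrementAndGet();               // "span started" for this activity
    }

    // Children are minted by the parent, mirroring createBackPressureContext() in the diff above.
    SketchBackPressureContext createBackPressureContext() {
        return new SketchBackPressureContext(root, this);
    }

    @Override public void close() { /* end the span here */ }
}

class SketchBackPressureContext implements AutoCloseable {
    SketchBackPressureContext(SketchRoot root, SketchReadChunkContext enclosing) {
        root.backPressureCount.incrementAndGet();
    }

    @Override public void close() { /* end the span here */ }
}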
- * - * @return */ - @Override public CompletableFuture> - readNextTrafficStreamChunk(IRootReplayerContext context) { - var readContext = context.createReadChunkContext(); + readNextTrafficStreamChunk(Supplier commitContextSupplier) { + var readContext = commitContextSupplier.get(); log.info("BlockingTrafficSource::readNext"); var trafficStreamListFuture = CompletableFuture .supplyAsync(() -> blockIfNeeded(readContext), task -> new Thread(task).start()) .thenCompose(v -> { log.info("BlockingTrafficSource::composing"); - return underlyingSource.readNextTrafficStreamChunk(readContext); + return underlyingSource.readNextTrafficStreamChunk(()->readContext); }) .whenComplete((v,t)->readContext.close()); return trafficStreamListFuture.whenComplete((v, t) -> { @@ -107,7 +105,7 @@ public void stopReadsPast(Instant pointInTime) { } var maxLocallyObservedTimestamp = v.stream() .flatMap(tswk -> tswk.getStream().getSubStreamList().stream()) - .map(tso -> tso.getTs()) + .map(TrafficObservation::getTs) .max(Comparator.comparingLong(Timestamp::getSeconds) .thenComparingInt(Timestamp::getNanos)) .map(TrafficStreamUtils::instantFromProtoTimestamp) @@ -118,14 +116,14 @@ public void stopReadsPast(Instant pointInTime) { }); } - private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) { + private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) { if (stopReadingAtRef.get().equals(Instant.EPOCH)) { return null; } log.atInfo().setMessage(() -> "stopReadingAtRef=" + stopReadingAtRef + " lastTimestampSecondsRef=" + lastTimestampSecondsRef).log(); - ITrafficSourceContexts.IBackPressureBlockContext blockContext = null; + ITrafficSourceContexts.IBackPressureBlockContext blockContext = null; while (stopReadingAtRef.get().isBefore(lastTimestampSecondsRef.get())) { if (blockContext == null) { - blockContext = new TrafficSourceContexts.BackPressureBlockContext(readContext); + blockContext = readContext.createBackPressureContext(); } try { log.atInfo().setMessage("blocking until signaled to read the next chunk last={} stop={}") @@ -135,7 +133,7 @@ private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext "acquring readGate semaphore with timeout=" + waitIntervalMs).log(); - try (var waitContext = new TrafficSourceContexts.WaitForNextSignal(blockContext)) { + try (var waitContext = blockContext.createWaitForSignalContext()) { readGate.tryAcquire(waitIntervalMs, TimeUnit.MILLISECONDS); } } @@ -168,9 +166,10 @@ private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext context, + public CommitResult commitTrafficStream(Function contextFactory, ITrafficStreamKey trafficStreamKey) throws IOException { - var commitResult = underlyingSource.commitTrafficStream(context, trafficStreamKey); + var commitResult = underlyingSource.commitTrafficStream(contextFactory, trafficStreamKey); if (commitResult == CommitResult.AfterNextRead) { readGate.drainPermits(); readGate.release(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java index 845eb3c01..bf88b7e20 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java @@ -1,4 +1,9 @@ package 
org.opensearch.migrations.replay.traffic.source; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + public interface ISimpleTrafficCaptureSource extends ITrafficCaptureSource { } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java index 4c76cf2ad..ebc8c4da3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java @@ -1,9 +1,8 @@ package org.opensearch.migrations.replay.traffic.source; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import java.io.Closeable; import java.io.IOException; @@ -11,6 +10,8 @@ import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; +import java.util.function.Function; +import java.util.function.Supplier; public interface ITrafficCaptureSource extends Closeable { @@ -18,12 +19,11 @@ enum CommitResult { Immediate, AfterNextRead, BlockedByOtherCommits, Ignored } - CompletableFuture> readNextTrafficStreamChunk(IRootReplayerContext context); + CompletableFuture> + readNextTrafficStreamChunk(Supplier contextSupplier); - /** - * Returns true if the committed results are immediate - */ - CommitResult commitTrafficStream(IInstrumentationAttributes context, + CommitResult commitTrafficStream(Function contextFactory, ITrafficStreamKey trafficStreamKey) throws IOException; default void close() throws IOException {} @@ -32,7 +32,7 @@ default void close() throws IOException {} * Keep-alive call to be used by the BlockingTrafficSource to keep this connection alive if * this is required. 
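The interface change above replaces a pre-built context argument with a Supplier (for reads) and a Function (for commits), so a wrapping source can build the context once, do its own blocking or bookkeeping inside it, and then forward () -> ctx to the wrapped source, as the BlockingTrafficSource hunk does. A small sketch of that wrapper shape, assuming only JDK types; the names below are hypothetical and not from the patch.

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

// Hypothetical sketch of the supplier-based read API shown above: the wrapper builds the
// context exactly once, uses it itself, then hands the already-built context downstream.
interface SketchSource<CTX extends AutoCloseable> {
    CompletableFuture<List<String>> readNextChunk(Supplier<CTX> contextSupplier);
}

class SketchBlockingSource<CTX extends AutoCloseable> implements SketchSource<CTX> {
    private final SketchSource<CTX> underlying;

    SketchBlockingSource(SketchSource<CTX> underlying) { this.underlying = underlying; }

    @Override
    public CompletableFuture<List<String>> readNextChunk(Supplier<CTX> contextSupplier) {
        var ctx = contextSupplier.get();                     // built up front by the wrapper
        return underlying.readNextChunk(() -> ctx)           // downstream reuses the same context
                .whenComplete((v, t) -> {
                    try {
                        ctx.close();                         // close once the whole read settles
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                });
    }
}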
*/ - default void touch(IInstrumentationAttributes context) {} + default void touch(ITrafficSourceContexts.IBackPressureBlockContext context) {} /** * @return The time that the next call to touch() must be completed for this source to stay diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 076718fd6..59146db1d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -9,7 +9,10 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; @@ -21,9 +24,12 @@ import java.io.EOFException; import java.io.IOException; import java.io.InputStream; +import java.time.Instant; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.function.Supplier; @Slf4j public class InputStreamOfTraffic implements ISimpleTrafficCaptureSource { @@ -31,17 +37,17 @@ public class InputStreamOfTraffic implements ISimpleTrafficCaptureSource { private final AtomicInteger trafficStreamsRead = new AtomicInteger(); private final ChannelContextManager channelContextManager; - public InputStreamOfTraffic(IInstrumentationAttributes context, InputStream inputStream) { + public InputStreamOfTraffic(RootReplayerContext context, InputStream inputStream) { this.channelContextManager = new ChannelContextManager(context); this.inputStream = inputStream; } public static class IOSTrafficStreamContext - extends DirectNestedSpanContext> - implements IReplayContexts.ITrafficStreamsLifecycleContext { + extends DirectNestedSpanContext + implements IReplayContexts.ITrafficStreamsLifecycleContext { @Getter private final ITrafficStreamKey trafficStreamKey; - public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext ctx, + public IOSTrafficStreamContext(@NonNull ReplayContexts.ChannelKeyContext ctx, ITrafficStreamKey tsk) { super(ctx); this.trafficStreamKey = tsk; @@ -49,12 +55,9 @@ public IOSTrafficStreamContext(@NonNull IReplayContexts.IChannelKeyContext getChannelKeyContext() { + public IReplayContexts.IChannelKeyContext getChannelKeyContext() { return getImmediateEnclosingScope(); } } @@ -74,11 +84,10 @@ public IReplayContexts.IChannelKeyContext getChannelKeyCont /** * Returns a CompletableFuture to a TrafficStream object or sets the cause exception to an * EOFException if the input has been exhausted. 
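Per the javadoc above, the read future either yields the next chunk or completes exceptionally with an EOFException once the input is exhausted. Below is a hedged caller-side sketch of that contract, using a hypothetical readNext supplier in place of readNextTrafficStreamChunk(contextSupplier); it is illustrative only and not part of the patch.

import java.io.EOFException;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.function.Supplier;

class SketchDrainLoop {
    // Keep reading until the returned future completes exceptionally with an EOFException.
    static void drain(Supplier<CompletableFuture<List<String>>> readNext) {
        while (true) {
            try {
                readNext.get().join().forEach(System.out::println);
            } catch (CompletionException e) {
                if (e.getCause() instanceof EOFException) {
                    return;                                  // normal end-of-input, not an error
                }
                throw e;
            }
        }
    }
}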
- * - * @return */ + @Override public CompletableFuture> - readNextTrafficStreamChunk(IInstrumentationAttributes context) { + readNextTrafficStreamChunk(Supplier contextSupplier) { return CompletableFuture.supplyAsync(() -> { var builder = TrafficStream.newBuilder(); try { @@ -104,7 +113,9 @@ public IReplayContexts.IChannelKeyContext getChannelKeyCont } @Override - public CommitResult commitTrafficStream(IInstrumentationAttributes ctx, ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(Function ctx, + ITrafficStreamKey trafficStreamKey) { // do nothing - this datasource isn't transactional return CommitResult.Immediate; } From 320e9d8555ea86d187b43a1bb07df636b2874972 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 11 Jan 2024 23:10:22 -0500 Subject: [PATCH 56/94] Working on updating proxy code to get everything to compile. Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 36 ++--- .../tracing/IRootKafkaOffloaderContext.java | 15 +++ .../tracing/KafkaRecordContext.java | 28 +++- .../IConnectionCaptureFactory.java | 5 +- .../tracing/ConnectionContext.java | 21 ++- .../tracing/RootOffloaderContext.java | 10 +- .../tracing/BaseNestedSpanContext.java | 2 +- .../CommonScopedMetricInstruments.java | 52 +++++--- .../tracing/IInstrumentConstructor.java | 2 +- .../tracing/IInstrumentationAttributes.java | 1 - .../IScopedInstrumentationAttributes.java | 5 + .../migrations/tracing/RootOtelContext.java | 19 ++- .../commoncontexts/IConnectionContext.java | 4 + ...ava => InMemoryInstrumentationBundle.java} | 34 +---- ...nditionallyReliableLoggingHttpHandler.java | 7 +- .../netty/LoggingHttpHandler.java | 26 ++-- .../netty/RequestContextStateMachine.java | 4 +- .../netty/tracing/HttpMessageContext.java | 61 --------- .../tracing/IRootWireLoggingContext.java | 10 +- .../netty/tracing/RootWireLoggingContext.java | 18 ++- .../netty/tracing/WireCaptureContexts.java | 125 ++++++++++++++++++ .../replay/ClientConnectionPool.java | 2 +- .../migrations/replay/TrafficReplayer.java | 16 +-- .../NettyPacketToHttpConsumer.java | 2 +- .../kafka/KafkaTrafficCaptureSource.java | 14 +- .../tracing/IKafkaConsumerContexts.java | 4 - .../replay/tracing/IReplayContexts.java | 10 -- .../tracing/ITrafficSourceContexts.java | 5 +- .../replay/tracing/KafkaConsumerContexts.java | 20 +-- .../replay/tracing/ReplayContexts.java | 115 ++++++++-------- .../replay/tracing/RootReplayerContext.java | 85 ++++++------ .../replay/tracing/TrafficSourceContexts.java | 13 +- .../traffic/source/BlockingTrafficSource.java | 6 +- .../traffic/source/ITrafficCaptureSource.java | 4 +- .../traffic/source/InputStreamOfTraffic.java | 53 +------- .../migrations/replay/TestRequestKey.java | 9 +- .../TestTrafficStreamsLifecycleContext.java | 32 +---- .../migrations/replay/TestUtils.java | 5 +- .../migrations/tracing/TestContext.java | 24 ++++ 39 files changed, 479 insertions(+), 425 deletions(-) create mode 100644 TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/IRootKafkaOffloaderContext.java rename TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/{TestContext.java => InMemoryInstrumentationBundle.java} (58%) delete mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java create mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java 
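Among the files listed for this commit, WireCaptureContexts.java replaces the deleted HttpMessageContext.java: the single context class that carried an HttpTransactionState enum becomes one small subclass per transaction state, each with its own activity name and metric instruments. A rough, hypothetical sketch of that refactor shape follows; only the activity-name strings are taken from the diff further below, everything else is a stand-in.

// Illustrative only: one subclass per former enum value instead of a state field.
abstract class SketchMessageContext {
    final long sourceRequestIndex;

    SketchMessageContext(long sourceRequestIndex) { this.sourceRequestIndex = sourceRequestIndex; }

    abstract String activityName();                           // replaces getSpanLabelForState(state)
}

class SketchGatheringRequestContext extends SketchMessageContext {
    SketchGatheringRequestContext(long idx) { super(idx); }
    @Override String activityName() { return "gatheringRequest"; }
}

class SketchWaitingForResponseContext extends SketchMessageContext {
    SketchWaitingForResponseContext(long idx) { super(idx); }
    @Override String activityName() { return "waitingForResponse"; }
}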
create mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 4ddc646a7..44478a7d1 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -10,6 +10,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.apache.kafka.clients.producer.RecordMetadata; +import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; @@ -17,6 +18,7 @@ import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; import org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.IRootKafkaOffloaderContext; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.KafkaRecordContext; import java.io.IOException; @@ -37,29 +39,31 @@ public class KafkaCaptureFactory implements IConnectionCaptureFactory producer; private final String topicNameForTraffic; private final int bufferSize; - public KafkaCaptureFactory(String nodeId, Producer producer, + public KafkaCaptureFactory(IRootKafkaOffloaderContext rootScope, String nodeId, Producer producer, String topicNameForTraffic, int messageSize) { + this.rootScope = rootScope; this.nodeId = nodeId; this.producer = producer; this.topicNameForTraffic = topicNameForTraffic; this.bufferSize = messageSize - KAFKA_MESSAGE_OVERHEAD_BYTES; } - public KafkaCaptureFactory(String nodeId, Producer producer, int messageSize) { - this(nodeId, producer, DEFAULT_TOPIC_NAME_FOR_TRAFFIC, messageSize); + public KafkaCaptureFactory(IRootKafkaOffloaderContext rootScope, String nodeId, Producer producer, int messageSize) { + this(rootScope, nodeId, producer, DEFAULT_TOPIC_NAME_FOR_TRAFFIC, messageSize); } @Override - public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, - String connectionId) { + public IChannelConnectionCaptureSerializer + createOffloader(IConnectionContext ctx, String connectionId) { return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, - new StreamManager(ctx, connectionId)); + new StreamManager(rootScope, ctx, connectionId)); } @AllArgsConstructor @@ -74,15 +78,14 @@ static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { class StreamManager extends OrderedStreamLifecyleManager { IConnectionContext telemetryContext; + IRootKafkaOffloaderContext rootScope; String connectionId; Instant startTime; - public StreamManager(IConnectionContext ctx, String connectionId) { + public StreamManager(IRootKafkaOffloaderContext rootScope, IConnectionContext ctx, String connectionId) { // TODO - add https://opentelemetry.io/blog/2022/instrument-kafka-clients/ + 
this.rootScope = rootScope; this.telemetryContext = ctx; - ctx.meterIncrementEvent("offloader_created"); - telemetryContext.meterDeltaEvent("offloaders_active", 1); - this.connectionId = connectionId; this.startTime = Instant.now(); } @@ -90,15 +93,12 @@ public StreamManager(IConnectionContext ctx, String connectionId) { @Override public void close() throws IOException { log.atInfo().setMessage(() -> "factory.close()").log(); - telemetryContext.meterHistogramMillis("offloader_stream_lifetime", - Duration.between(startTime, Instant.now())); - telemetryContext.meterDeltaEvent("offloaders_active", -1); - telemetryContext.meterIncrementEvent("offloader_closed"); + telemetryContext.close(); } @Override public CodedOutputStreamWrapper createStream() { - telemetryContext.meterIncrementEvent("stream_created"); + telemetryContext.getCurrentSpan().addEvent("streamCreated"); ByteBuffer bb = ByteBuffer.allocate(bufferSize); return new CodedOutputStreamWrapper(CodedOutputStream.newInstance(bb), bb); @@ -123,9 +123,8 @@ public CodedOutputStreamWrapper createStream() { var cf = new CompletableFuture(); log.debug("Sending Kafka producer record: {} for topic: {}", recordId, topicNameForTraffic); - var flushContext = new KafkaRecordContext(telemetryContext, + var flushContext = rootScope.createKafkaRecordContext(telemetryContext, topicNameForTraffic, recordId, kafkaRecord.value().length); - telemetryContext.meterIncrementEvent("stream_flush_called"); // Async request to Kafka cluster producer.send(kafkaRecord, handleProducerRecordSent(cf, recordId, flushContext)); @@ -161,6 +160,7 @@ private Callback handleProducerRecordSent(CompletableFuture cf, // that field out of scope. return (metadata, exception) -> { log.atInfo().setMessage(()->"kafka completed sending a record").log(); + flushContext.meterHistogramMicros(exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms"); flushContext.meterIncrementEvent(exception==null ? 
"stream_flush_success" : "stream_flush_failure"); flushContext.meterIncrementEvent( @@ -169,9 +169,11 @@ private Callback handleProducerRecordSent(CompletableFuture cf, flushContext.close(); if (exception != null) { + flushContext.addException(exception); log.error("Error sending producer record: {}", recordId, exception); cf.completeExceptionally(exception); } else { + flushContext.onSuccessfulFlush(); log.debug("Kafka producer record: {} has finished sending for topic: {} and partition {}", recordId, metadata.topic(), metadata.partition()); cf.complete(metadata); diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/IRootKafkaOffloaderContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/IRootKafkaOffloaderContext.java new file mode 100644 index 000000000..b9221b901 --- /dev/null +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/IRootKafkaOffloaderContext.java @@ -0,0 +1,15 @@ +package org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing; + +import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; + +public interface IRootKafkaOffloaderContext extends IInstrumentConstructor { + KafkaRecordContext.MetricInstruments getKafkaOffloadingInstruments(); + + default KafkaRecordContext createKafkaRecordContext(IConnectionContext telemetryContext, + String topicNameForTraffic, + String recordId, + int length) { + return new KafkaRecordContext(this, telemetryContext, topicNameForTraffic, recordId, length); + } +} diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index dc2d08497..c6bb54b86 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -2,14 +2,20 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.Meter; import lombok.Getter; +import org.opensearch.migrations.tracing.BaseNestedSpanContext; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; -public class KafkaRecordContext extends DirectNestedSpanContext +public class KafkaRecordContext extends + BaseNestedSpanContext implements IScopedInstrumentationAttributes { - public static final String SCOPE_NAME = "KafkaCapture"; + public static final String ACTIVITY_NAME = "kafkaCommit"; static final AttributeKey TOPIC_ATTR = AttributeKey.stringKey("topic"); static final AttributeKey RECORD_ID_ATTR = AttributeKey.stringKey("recordId"); @@ -22,15 +28,27 @@ public class KafkaRecordContext extends 
DirectNestedSpanContext { - IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) throws IOException; + IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, + String connectionId) throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index dff389608..c3f6266ea 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -4,6 +4,7 @@ import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; import lombok.NonNull; @@ -11,11 +12,11 @@ import org.opensearch.migrations.tracing.AttributeNameMatchingPredicate; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.FilteringAttributeBuilder; +import org.opensearch.migrations.tracing.IHasRootInstrumentationScope; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; public class ConnectionContext extends BaseNestedSpanContext - implements IConnectionContext { - public static final String SCOPE_NAME = "Channel"; + implements IConnectionContext, IHasRootInstrumentationScope { private static final AttributeNameMatchingPredicate KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT = AttributeNameMatchingPredicate.builder(true).add(CONNECTION_ID_ATTR.getKey()).build(); @@ -29,8 +30,6 @@ public class ConnectionContext extends BaseNestedSpanContext getBuckets(String activityName, + double firstBucketSize, double lastBucketCeiling) { double[] bucketBoundary = new double[]{firstBucketSize}; - var buckets = DoubleStream.generate(()->{ + var buckets = DoubleStream.generate(() -> { var tmp = bucketBoundary[0]; bucketBoundary[0] *= 2.0; return tmp; - }).takeWhile(v->v<=lastBucketCeiling).boxed().collect(Collectors.toList()); - log.atInfo().setMessage(()->"Setting buckets for "+scopeName+":"+activityName+" to "+ - buckets.stream().map(x->""+x).collect(Collectors.joining(",","[","]"))).log(); - contextDuration = meter + }).takeWhile(v -> v <= lastBucketCeiling).boxed().collect(Collectors.toList()); + log.atInfo().setMessage(() -> "Setting buckets for " + activityName + " to " + + buckets.stream().map(x -> "" + x).collect(Collectors.joining(",", "[", "]"))).log(); + return buckets; + } + + public CommonScopedMetricInstruments(Meter meter, String activityName, List buckets) { + contextCounter = meter + .counterBuilder(activityName + "Count").build(); + exceptionCounter = meter + .counterBuilder(activityName + "ExceptionCount").build(); + var durationBuilder = meter .histogramBuilder(activityName + "Duration") - .setUnit("ms") - .setExplicitBucketBoundariesAdvice(buckets) - .build(); + .setUnit("ms"); + if (buckets != null) { + durationBuilder = durationBuilder.setExplicitBucketBoundariesAdvice(buckets); + } + contextDuration = durationBuilder.build(); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index f5ed0f433..5e979f046 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -4,6 +4,6 @@ import io.opentelemetry.api.trace.Span; public interface IInstrumentConstructor { - Span buildSpan(IInstrumentationAttributes enclosingScope, String scopeName, String spanName, Span linkedSpan, + Span buildSpan(IInstrumentationAttributes enclosingScope, String spanName, Span linkedSpan, AttributesBuilder attributesBuilder); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 612999a2a..fccff732e 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -10,7 +10,6 @@ import java.util.ArrayList; public interface IInstrumentationAttributes { - String getScopeName(); IInstrumentationAttributes getEnclosingScope(); default Span getCurrentSpan() { return null; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 434d8cd12..5a087944a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -35,5 +35,10 @@ default void close() { default void addException(Exception e) { getCurrentSpan().recordException(e); + sendMeterEventsForException(e); + } + + default void sendMeterEventsForException(Exception e) { + meterIncrementEvent(getMetrics().exceptionCounter); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index e6a0fb817..6be44ffd1 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -27,6 +27,7 @@ public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; + private final String scopeName; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @NonNull String serviceName) { @@ -74,21 +75,17 @@ public static OpenTelemetry initializeOpenTelemetry(String collectorEndpoint, St } - public RootOtelContext() { - this(null); + public RootOtelContext(String scopeName) { + this(scopeName, null); } - public RootOtelContext(String collectorEndpoint, String serviceName) { - this(initializeOpenTelemetry(collectorEndpoint, serviceName)); + public RootOtelContext(String scopeName, String collectorEndpoint, String serviceName) { + this(scopeName, initializeOpenTelemetry(collectorEndpoint, serviceName)); } - public RootOtelContext(OpenTelemetry sdk) { + public 
RootOtelContext(String scopeName, OpenTelemetry sdk) { openTelemetryImpl = sdk != null ? sdk : initializeOpenTelemetry(null, null); - } - - @Override - public String getScopeName() { - return "Root"; + this.scopeName = scopeName; } @Override @@ -125,7 +122,7 @@ private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, S @Override public Span buildSpan(IInstrumentationAttributes enclosingScope, - String scopeName, String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { + String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan, linkedSpan); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 845b13f40..314b5a9e9 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -20,4 +20,8 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder.put(CONNECTION_ID_ATTR, getConnectionId()) .put(NODE_ID_ATTR, getNodeId()); } + + void onConnectionCreated(); + + void onConnectionClosed(); } diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java similarity index 58% rename from TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java rename to TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java index e2e4d335d..99db69e6e 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java @@ -1,39 +1,24 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.trace.Span; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.export.MetricExporter; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; -import io.opentelemetry.sdk.trace.export.SpanExporter; import lombok.Getter; import java.time.Duration; -import java.util.Optional; -public class TestContext implements IInstrumentationAttributes { - @Getter - public IInstrumentConstructor rootInstrumentationScope; - @Getter +@Getter +public class InMemoryInstrumentationBundle { + public final OpenTelemetrySdk openTelemetrySdk; public final InMemorySpanExporter testSpanExporter; - @Getter public final InMemoryMetricExporter testMetricExporter; - public static 
TestContext withTracking() { - return new TestContext(InMemorySpanExporter.create(), InMemoryMetricExporter.create()); - } - - public static TestContext noTracking() { - return new TestContext(null, null); - } - - public TestContext(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter testMetricExporter) { + public InMemoryInstrumentationBundle(InMemorySpanExporter testSpanExporter, + InMemoryMetricExporter testMetricExporter) { this.testSpanExporter = testSpanExporter; this.testMetricExporter = testMetricExporter; @@ -49,13 +34,6 @@ public TestContext(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter .build()) .build()); } - var openTel = otelBuilder.build(); - rootInstrumentationScope = new RootOtelContext(openTel); + openTelemetrySdk = otelBuilder.build(); } - - @Override - public IInstrumentationAttributes getEnclosingScope() { - return null; - } - } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index c12fda546..79a0596bc 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -6,11 +6,9 @@ import lombok.Lombok; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; -import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; import java.io.IOException; import java.util.function.Predicate; @@ -34,8 +32,7 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob boolean shouldCapture, HttpRequest httpRequest) throws Exception { if (shouldCapture && shouldBlockPredicate.test(httpRequest)) { - messageContext.meterIncrementEvent("blockingRequestUntilFlush"); - rotateNextMessageContext(HttpMessageContext.HttpTransactionState.INTERNALLY_BLOCKED); + rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.INTERNALLY_BLOCKED); trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); messageContext.meterIncrementEvent(t != null ? 
"blockedFlushFailure" : "blockedFlushSuccess"); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index f4d13ecdc..6d4dda0a3 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -21,13 +21,11 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; -import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; @@ -137,7 +135,7 @@ public HttpRequest resetCurrentRequest() { protected final EmbeddedChannel httpDecoderChannel; - protected HttpMessageContext messageContext; + protected WireCaptureContexts.HttpMessageContext messageContext; public LoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, String nodeId, String channelKey, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @@ -145,10 +143,10 @@ public LoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, String n throws IOException { var parentContext = new ConnectionContext(rootContext, channelKey, nodeId); - this.messageContext = new HttpMessageContext(parentContext, 0, HttpMessageContext.HttpTransactionState.REQUEST); + this.messageContext = new WireCaptureContexts.HttpMessageContext(parentContext, 0, WireCaptureContexts.HttpMessageContext.HttpTransactionState.REQUEST); messageContext.meterIncrementEvent("requestStarted"); - this.trafficOffloader = trafficOffloaderFactory.createOffloader(parentContext, channelKey); + this.trafficOffloader = trafficOffloaderFactory.createOffloader(rootContext, parentContext, channelKey); var captureState = new CaptureState(); httpDecoderChannel = new EmbeddedChannel( new SimpleHttpRequestDecoder(httpHeadersCapturePredicate.getHeadersRequiredForMatcher(), captureState), @@ -156,9 +154,9 @@ public LoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, String n ); } - protected void rotateNextMessageContext(HttpMessageContext.HttpTransactionState nextState) { - messageContext = new HttpMessageContext(messageContext.getLogicalEnclosingScope(), - (nextState== HttpMessageContext.HttpTransactionState.REQUEST ? 1 : 0) + protected void rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState nextState) { + messageContext = new WireCaptureContexts.HttpMessageContext(messageContext.getLogicalEnclosingScope(), + (nextState== WireCaptureContexts.HttpMessageContext.HttpTransactionState.REQUEST ? 
1 : 0) + messageContext.getSourceRequestIndex(), nextState); } @@ -206,7 +204,7 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, boolean shouldCapture, HttpRequest httpRequest) throws Exception { - rotateNextMessageContext(HttpMessageContext.HttpTransactionState.WAITING); + rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.WAITING); super.channelRead(ctx, msg); messageContext.meterIncrementEvent("requestReceived"); @@ -218,9 +216,9 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - if (messageContext.getState() == HttpMessageContext.HttpTransactionState.RESPONSE) { + if (messageContext.getState() == WireCaptureContexts.HttpMessageContext.HttpTransactionState.RESPONSE) { messageContext.endSpan(); // TODO - make this meter on create/close - rotateNextMessageContext(HttpMessageContext.HttpTransactionState.REQUEST); + rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.REQUEST); } var timestamp = Instant.now(); var requestParsingHandler = getHandlerThatHoldsParsedHttpRequest(); @@ -269,9 +267,9 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { - if (messageContext.getState() != HttpMessageContext.HttpTransactionState.RESPONSE) { + if (messageContext.getState() != WireCaptureContexts.HttpMessageContext.HttpTransactionState.RESPONSE) { messageContext.endSpan(); // TODO - make this meter on create/close - rotateNextMessageContext(HttpMessageContext.HttpTransactionState.RESPONSE); + rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.RESPONSE); } var bb = (ByteBuf) msg; if (getHandlerThatHoldsParsedHttpRequest().captureState.shouldCapture()) { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java index 4a670e8d0..b02272208 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestContextStateMachine.java @@ -2,7 +2,7 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.trafficcapture.netty.tracing.HttpMessageContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; /** @@ -16,7 +16,7 @@ public class RequestContextStateMachine { @Getter public final ConnectionContext connectionContext; @Getter - HttpMessageContext currentRequestContext; + WireCaptureContexts.HttpMessageContext currentRequestContext; public RequestContextStateMachine(ConnectionContext incoming) { connectionContext = incoming; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java deleted file mode 100644 index 
45eadadee..000000000 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/HttpMessageContext.java +++ /dev/null @@ -1,61 +0,0 @@ -package org.opensearch.migrations.trafficcapture.netty.tracing; - -import lombok.Getter; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; - -import java.time.Instant; - -public class HttpMessageContext extends DirectNestedSpanContext - implements IHttpTransactionContext, IWithStartTimeAndAttributes { - public static final String SCOPE_NAME = "CapturingHttpHandler"; - - public static final String GATHERING_REQUEST = "gatheringRequest"; - public static final String BLOCKED = "blocked"; - public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; - public static final String GATHERING_RESPONSE = "gatheringResponse"; - - public enum HttpTransactionState { - REQUEST, - INTERNALLY_BLOCKED, - WAITING, - RESPONSE - } - - @Getter - final long sourceRequestIndex; - @Getter - final HttpTransactionState state; - - static String getSpanLabelForState(HttpMessageContext.HttpTransactionState state) { - switch (state) { - case REQUEST: - return GATHERING_REQUEST; - case INTERNALLY_BLOCKED: - return BLOCKED; - case WAITING: - return WAITING_FOR_RESPONSE; - case RESPONSE: - return GATHERING_RESPONSE; - default: - throw new IllegalStateException("Unknown enum value: "+state); - } - } - - - public HttpMessageContext(IConnectionContext enclosingScope, long sourceRequestIndex, HttpTransactionState state) { - super(enclosingScope); - this.sourceRequestIndex = sourceRequestIndex; - this.state = state; - initializeSpan(); - } - - @Override - public String getActivityName() { - return getSpanLabelForState(state); - } - @Override - public String getScopeName() { return SCOPE_NAME; } -} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java index db9570049..01b314ea9 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java @@ -1,8 +1,12 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; -import io.opentelemetry.api.metrics.Meter; -import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; +import lombok.Getter; +import org.opensearch.migrations.tracing.IRootOtelContext; -public interface IRootWireLoggingContext extends IRootOffloaderContext { +public interface IRootWireLoggingContext extends IRootOtelContext { + WireCaptureContexts.RequestContext.MetricInstruments getHttpRequestInstruments(); + WireCaptureContexts.BlockingContext.MetricInstruments getBlockingInstruments(); + WireCaptureContexts.WaitingForResponseContext.MetricInstruments getWaitingForResponseInstruments(); + WireCaptureContexts.ResponseContext.MetricInstruments getResponseInstruments(); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java 
b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java index 9c7eac91b..74db849b8 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java @@ -1,17 +1,27 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.metrics.Meter; import lombok.Getter; import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; public class RootWireLoggingContext extends RootOffloaderContext implements IRootWireLoggingContext { public static final String SCOPE_NAME = "NettyCapture"; - @Getter - Meter wireLoggingMeter; + + @Getter public final WireCaptureContexts.RequestContext.MetricInstruments httpRequestInstruments; + @Getter public final WireCaptureContexts.BlockingContext.MetricInstruments blockingInstruments; + @Getter public final WireCaptureContexts.WaitingForResponseContext.MetricInstruments waitingForResponseInstruments; + @Getter public final WireCaptureContexts.ResponseContext.MetricInstruments responseInstruments; public RootWireLoggingContext(OpenTelemetry openTelemetry) { + this(openTelemetry, SCOPE_NAME); + } + + public RootWireLoggingContext(OpenTelemetry openTelemetry, String scopeName) { super(openTelemetry); - wireLoggingMeter = super.getMeterForScope(SCOPE_NAME); + var meter = this.getMeterProvider().get(scopeName); + httpRequestInstruments = new WireCaptureContexts.RequestContext.MetricInstruments(meter); + blockingInstruments = new WireCaptureContexts.BlockingContext.MetricInstruments(meter); + waitingForResponseInstruments = new WireCaptureContexts.WaitingForResponseContext.MetricInstruments(meter); + responseInstruments = new WireCaptureContexts.ResponseContext.MetricInstruments(meter); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java new file mode 100644 index 000000000..3b631846c --- /dev/null +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -0,0 +1,125 @@ +package org.opensearch.migrations.trafficcapture.netty.tracing; + +import io.opentelemetry.api.metrics.Meter; +import lombok.Getter; +import org.opensearch.migrations.tracing.BaseNestedSpanContext; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; +import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; +import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; + +public class WireCaptureContexts { + private WireCaptureContexts() {} + + public static abstract class HttpMessageContext extends + BaseNestedSpanContext + implements IHttpTransactionContext, IWithStartTimeAndAttributes { + + @Getter + final long sourceRequestIndex; + + public HttpMessageContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, + long sourceRequestIndex) { + super(rootWireLoggingContext, enclosingScope); + this.sourceRequestIndex = sourceRequestIndex; + } + } + + public static class RequestContext 
extends HttpMessageContext { + public static final String ACTIVITY_NAME = "gatheringRequest"; + + public RequestContext(RootWireLoggingContext rootWireLoggingContext, + IConnectionContext enclosingScope, + long sourceRequestIndex) { + super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); + } + + @Override + public String getActivityName() { + return ACTIVITY_NAME; + } + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); + } + } + + @Override + public MetricInstruments getMetrics() { + return getRootInstrumentationScope().httpRequestInstruments; + } + } + + public static class BlockingContext extends HttpMessageContext { + public static final String ACTIVITY_NAME = "blocked"; + + public BlockingContext(RootWireLoggingContext rootWireLoggingContext, + IConnectionContext enclosingScope, + long sourceRequestIndex) { + super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); + } + + @Override + public String getActivityName() { + return ACTIVITY_NAME; + } + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); + } + } + + @Override + public RequestContext.MetricInstruments getMetrics() { + return getRootInstrumentationScope().httpRequestInstruments; + } + } + + public static class WaitingForResponseContext extends HttpMessageContext { + public static final String ACTIVITY_NAME = "waitingForResponse"; + public WaitingForResponseContext(RootWireLoggingContext rootWireLoggingContext, + IConnectionContext enclosingScope, + long sourceRequestIndex) { + super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); + } + + @Override + public String getActivityName() { + return ACTIVITY_NAME; + } + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); + } + } + + @Override + public RequestContext.MetricInstruments getMetrics() { + return getRootInstrumentationScope().httpRequestInstruments; + } + } + + public static class ResponseContext extends HttpMessageContext { + public static final String ACTIVITY_NAME = "gatheringResponse"; + public ResponseContext(RootWireLoggingContext rootWireLoggingContext, + IConnectionContext enclosingScope, + long sourceRequestIndex) { + super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); + } + + @Override + public String getActivityName() { + return ACTIVITY_NAME; + } + public static class MetricInstruments extends CommonScopedMetricInstruments { + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); + } + } + + @Override + public RequestContext.MetricInstruments getMetrics() { + return getRootInstrumentationScope().httpRequestInstruments; + } + } +} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index 8f147a082..ffe036611 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -177,7 +177,7 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte .thenAccept(cf-> { cf.channel().close() .addListener(closeFuture -> { - 
channelAndFutureWork.getChannelContext().onTargetConnectionClosed(); + channelAndFutureWork.getChannelContext().onConnectionClosed(); if (closeFuture.isSuccess()) { channelClosedFuture.future.complete(channelAndFutureWork.getInnerChannelFuture().channel()); } else { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 956c3ba36..a5d75c1ab 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -639,7 +639,7 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, } } - Void handleCompletedTransaction(IInstrumentationAttributes context, + Void handleCompletedTransaction(IReplayContexts.IReplayerHttpTransactionContext context, @NonNull UniqueReplayerRequestKey requestKey, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { @@ -652,7 +652,7 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, try (var tupleHandlingContext = httpContext.createTupleContext()) { packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); } - commitTrafficStreams(rrPair.completionStatus, context, rrPair.trafficStreamKeysBeingHeld); + commitTrafficStreams(rrPair.completionStatus, rrPair.trafficStreamKeysBeingHeld); return null; } else { log.atError().setCause(t).setMessage(()->"Throwable passed to handle() for " + requestKey + @@ -688,25 +688,23 @@ Void handleCompletedTransaction(IInstrumentationAttributes context, public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { - commitTrafficStreams(status, ctx, trafficStreamKeysBeingHeld); + commitTrafficStreams(status, trafficStreamKeysBeingHeld); } @SneakyThrows private void commitTrafficStreams(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext context, List trafficStreamKeysBeingHeld) { commitTrafficStreams(status != RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY, - context, trafficStreamKeysBeingHeld); + trafficStreamKeysBeingHeld); } @SneakyThrows private void commitTrafficStreams(boolean shouldCommit, - IReplayContexts.IChannelKeyContext context, List trafficStreamKeysBeingHeld) { if (shouldCommit && trafficStreamKeysBeingHeld != null) { for (var tsk : trafficStreamKeysBeingHeld) { tsk.getTrafficStreamsContext().close(); - trafficCaptureSource.commitTrafficStream(()->context, tsk); + trafficCaptureSource.commitTrafficStream(tsk); } } } @@ -720,13 +718,13 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, replayEngine.setFirstTimestamp(timestamp); var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); cf.map(f->f.whenComplete((v,t)->{ - commitTrafficStreams(status, ctx, trafficStreamKeysBeingHeld); + commitTrafficStreams(status, trafficStreamKeysBeingHeld); }), ()->"closing the channel in the ReplayEngine"); } @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IReplayContexts.IChannelKeyContext ctx) { - commitTrafficStreams(true, ctx, List.of(tsk)); + commitTrafficStreams(true, List.of(tsk)); } private TransformedTargetRequestAndResponse diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index c312333c8..5d9adae03 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -126,7 +126,7 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup var rval = new DefaultChannelPromise(outboundChannelFuture.channel()); outboundChannelFuture.addListener((ChannelFutureListener) connectFuture -> { if (connectFuture.isSuccess()) { - channelKeyContext.onTargetConnectionCreated(); + channelKeyContext.onConnectionCreated(); var pipeline = connectFuture.channel().pipeline(); pipeline.removeFirst(); log.atTrace().setMessage(()-> channelKeyContext.getChannelKey() + diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index a8996ccdb..01088398e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -16,6 +16,7 @@ import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -79,12 +80,12 @@ public class KafkaTrafficCaptureSource implements ISimpleTrafficCaptureSource { private final KafkaBehavioralPolicy behavioralPolicy; private final ChannelContextManager channelContextManager; - public KafkaTrafficCaptureSource(@NonNull IInstrumentationAttributes globalContext, + public KafkaTrafficCaptureSource(@NonNull RootReplayerContext globalContext, Consumer kafkaConsumer, String topic, Duration keepAliveInterval) { this(globalContext, kafkaConsumer, topic, keepAliveInterval, Clock.systemUTC(), new KafkaBehavioralPolicy()); } - public KafkaTrafficCaptureSource(@NonNull IInstrumentationAttributes globalContext, + public KafkaTrafficCaptureSource(@NonNull RootReplayerContext globalContext, Consumer kafkaConsumer, @NonNull String topic, Duration keepAliveInterval, @@ -111,7 +112,7 @@ private void onKeyFinishedCommitting(ITrafficStreamKey trafficStreamKey) { channelContextManager.releaseContextFor(kafkaCtx.getImmediateEnclosingScope()); } - public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull IInstrumentationAttributes globalContext, + public static KafkaTrafficCaptureSource buildKafkaSource(@NonNull RootReplayerContext globalContext, @NonNull String brokers, @NonNull String topic, @NonNull String groupId, @@ -171,7 +172,7 @@ public static Properties buildKafkaProperties(@NonNull String brokers, @Override @SneakyThrows - public void touch(ITrafficSourceContexts.IReadChunkContext context) { + 
public void touch(ITrafficSourceContexts.IBackPressureBlockContext context) { CompletableFuture.runAsync(()->trackingKafkaConsumer.touch(context), kafkaExecutor).get(); } @@ -192,7 +193,7 @@ public Optional getNextRequiredTouch() { log.atTrace().setMessage("readNextTrafficStreamChunk()").log(); return CompletableFuture.supplyAsync(() -> { log.atTrace().setMessage("async...readNextTrafficStreamChunk()").log(); - return readNextTrafficStreamSynchronously(contextSupplier); + return readNextTrafficStreamSynchronously(contextSupplier.get()); }, kafkaExecutor); } @@ -237,8 +238,7 @@ public Optional getNextRequiredTouch() { } @Override - public CommitResult commitTrafficStream(IInstrumentationAttributes context, - ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { if (!(trafficStreamKey instanceof TrafficStreamKeyWithKafkaRecordId)) { throw new IllegalArgumentException("Expected key of type "+TrafficStreamKeyWithKafkaRecordId.class+ " but received "+trafficStreamKey+" (of type="+trafficStreamKey.getClass()+")"); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index de4684370..9169d16eb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -27,12 +27,8 @@ private MetricNames() {} } interface IAsyncListeningContext extends IInstrumentationAttributes { - String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; - @Override default String getScopeName() { return SCOPE_NAME; } } interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { - String SCOPE_NAME = ScopeNames.KAFKA_CONSUMER_SCOPE; - @Override default String getScopeName() { return SCOPE_NAME; } } interface ITouchScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.TOUCH; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index fca328eed..a08c98a1b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -72,12 +72,6 @@ private MetricNames() {} } public interface IAccumulationScope extends IScopedInstrumentationAttributes { - String SCOPE_NAME2 = "Replay"; - - @Override - default String getScopeName() { - return SCOPE_NAME2; - } } public interface IChannelKeyContext @@ -98,10 +92,6 @@ default String getConnectionId() { default String getNodeId() { return getChannelKey().getNodeId(); } - - void onTargetConnectionCreated(); - - void onTargetConnectionClosed(); } public interface IKafkaRecordContext diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java index d6702ba62..235fc31b0 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -15,11 +15,8 @@ private ActivityNames() {} public static final String WAIT_FOR_NEXT_BACK_PRESSURE_CHECK = "waitForNextBackPressureCheck"; } - interface ITrafficSourceContext extends IScopedInstrumentationAttributes { - String SCOPE_NAME = ScopeNames.TRAFFIC_SCOPE; - @Override default String getScopeName() { return SCOPE_NAME; } + interface ITrafficSourceContext extends IScopedInstrumentationAttributes { } - } interface IReadChunkContext extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.READ_NEXT_TRAFFIC_CHUNK; @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 1573b2c76..67a799c15 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -4,6 +4,7 @@ import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import lombok.AllArgsConstructor; import lombok.Getter; @@ -28,8 +29,7 @@ public static class MetricInstruments { public final LongCounter kafkaPartitionsRevokedCounter; public final LongCounter kafkaPartitionsAssignedCounter; public final LongUpDownCounter kafkaActivePartitionsCounter; - public MetricInstruments(MeterProvider meterProvider) { - var meter = meterProvider.get(SCOPE_NAME); + public MetricInstruments(Meter meter) { kafkaPartitionsRevokedCounter = meter .counterBuilder(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT).build(); kafkaPartitionsAssignedCounter = meter @@ -72,8 +72,8 @@ public IKafkaConsumerContexts.IPollScopeContext createNewPollContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public TouchScopeContext(@NonNull TrafficSourceContexts.BackPressureBlockContext enclosingScope) { @@ -90,8 +90,8 @@ public static class PollScopeContext extends BaseNestedSpanContext implements IKafkaConsumerContexts.IPollScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } @@ -117,8 +117,8 @@ public IKafkaConsumerContexts.IKafkaCommitScopeContext createNewKafkaCommitConte } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } @@ -138,8 +138,8 @@ public static class KafkaCommitScopeContext extends DirectNestedSpanContext implements IKafkaConsumerContexts.IKafkaCommitScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider 
meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 13ad84d6e..b3643872a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -3,6 +3,7 @@ import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; import lombok.NonNull; @@ -14,7 +15,6 @@ import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.tracing.IndirectNestedSpanContext; import java.time.Duration; import java.time.Instant; @@ -42,16 +42,15 @@ public ChannelKeyContext(RootReplayerContext rootScope, public static class MetricInstruments extends CommonScopedMetricInstruments { final LongUpDownCounter activeChannelCounter; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); activeChannelCounter = meter .upDownCounterBuilder(IReplayContexts.MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().channelKeyContext; + return getRootInstrumentationScope().channelKeyInstruments; } @Override @@ -60,11 +59,11 @@ public String toString() { } @Override - public void onTargetConnectionCreated() { + public void onConnectionCreated() { meterDeltaEvent(getMetrics().activeChannelCounter, 1); } @Override - public void onTargetConnectionClosed() { + public void onConnectionClosed() { meterDeltaEvent(getMetrics().activeChannelCounter, -1); } } @@ -86,9 +85,8 @@ public KafkaRecordContext(ChannelKeyContext enclosingScope, String recordId, int public static class MetricInstruments extends CommonScopedMetricInstruments { final LongCounter recordCounter; final LongCounter bytesCounter; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); recordCounter = meter.counterBuilder(IReplayContexts.MetricNames.KAFKA_RECORD_READ) .setUnit("records").build(); bytesCounter = meter.counterBuilder(IReplayContexts.MetricNames.KAFKA_BYTES_READ) @@ -97,7 +95,7 @@ public MetricInstruments(MeterProvider meterProvider) { } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().kafkaRecordContext; + return getRootInstrumentationScope().kafkaRecordInstruments; } @Override @@ -113,31 +111,41 @@ public String getRecordId() { } public static class TrafficStreamsLifecycleContext - extends IndirectNestedSpanContext + extends BaseNestedSpanContext implements IReplayContexts.ITrafficStreamsLifecycleContext { private final 
ITrafficStreamKey trafficStreamKey; - public TrafficStreamsLifecycleContext(KafkaRecordContext enclosingScope, - ITrafficStreamKey trafficStreamKey) { - super(enclosingScope); + protected TrafficStreamsLifecycleContext(IInstrumentationAttributes enclosingScope, + ITrafficStreamKey trafficStreamKey, + RootReplayerContext rootScope) { + super(rootScope, enclosingScope); this.trafficStreamKey = trafficStreamKey; initializeSpan(); meterIncrementEvent(getMetrics().streamsRead); } + public TrafficStreamsLifecycleContext(KafkaRecordContext enclosingScope, + ITrafficStreamKey trafficStreamKey) { + this(enclosingScope, trafficStreamKey, enclosingScope.getRootInstrumentationScope()); + } + + protected TrafficStreamsLifecycleContext(ChannelKeyContext enclosingScope, + ITrafficStreamKey trafficStreamKey) { + this(enclosingScope, trafficStreamKey, enclosingScope.getRootInstrumentationScope()); + } + public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter streamsRead; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); streamsRead = meter.counterBuilder(IReplayContexts.MetricNames.TRAFFIC_STREAMS_READ) .setUnit("objects").build(); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().trafficStreamLifecycleContext; + return getRootInstrumentationScope().trafficStreamLifecycleInstruments; } @Override @@ -159,7 +167,11 @@ public ITrafficStreamKey getTrafficStreamKey() { @Override public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { - return getImmediateEnclosingScope().getLogicalEnclosingScope(); + var parent = getEnclosingScope(); + while(!(parent instanceof IReplayContexts.IChannelKeyContext)) { + parent = parent.getEnclosingScope(); + } + return (IReplayContexts.IChannelKeyContext) parent; } } @@ -185,13 +197,13 @@ public IReplayContexts.ITupleHandlingContext createTupleContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().httpTransactionContext; + return getRootInstrumentationScope().httpTransactionInstruments; } public IReplayContexts.IChannelKeyContext getChannelKeyContext() { @@ -243,13 +255,13 @@ public RequestAccumulationContext(HttpTransactionContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().requestAccumContext; + return getRootInstrumentationScope().requestAccumInstruments; } } @@ -262,13 +274,13 @@ public ResponseAccumulationContext(HttpTransactionContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - 
return getRootInstrumentationScope().responseAccumContext; + return getRootInstrumentationScope().responseAccumInstruments; } } @@ -298,9 +310,8 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter transformBytesOut; private final LongCounter transformChunksOut; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); headerParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_HEADER_PARSE) .setUnit(COUNT_UNIT_STR).build(); payloadParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED) @@ -338,7 +349,7 @@ public MetricInstruments(MeterProvider meterProvider) { } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().transformationContext; + return getRootInstrumentationScope().transformationInstruments; } @Override public void onHeaderParse() { @@ -400,15 +411,14 @@ public ScheduledContext(HttpTransactionContext enclosingScope, Instant scheduled public static class MetricInstruments extends CommonScopedMetricInstruments { DoubleHistogram lag; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); lag = meter.histogramBuilder(IReplayContexts.MetricNames.NETTY_SCHEDULE_LAG).setUnit("ms").build(); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().scheduledContext; + return getRootInstrumentationScope().scheduledInstruments; } @Override @@ -434,9 +444,8 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter bytesWritten; private final LongCounter bytesRead; - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); sourceTargetGap = meter.histogramBuilder(IReplayContexts.MetricNames.SOURCE_TO_TARGET_REQUEST_LAG) .setUnit("ms").build(); bytesWritten = meter.counterBuilder(IReplayContexts.MetricNames.BYTES_WRITTEN_TO_TARGET) @@ -447,7 +456,7 @@ public MetricInstruments(MeterProvider meterProvider) { } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().targetRequestContext; + return getRootInstrumentationScope().targetRequestInstruments; } @Override @@ -480,13 +489,13 @@ public RequestSendingContext(TargetRequestContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().requestSendingContext; + return getRootInstrumentationScope().requestSendingInstruments; } } @@ -499,13 +508,13 @@ public WaitingForHttpResponseContext(TargetRequestContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, 
ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().waitingForHttpResponseContext; + return getRootInstrumentationScope().waitingForHttpResponseInstruments; } } @@ -519,13 +528,13 @@ public ReceivingHttpResponseContext(TargetRequestContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().receivingHttpContext; + return getRootInstrumentationScope().receivingHttpInstruments; } } @@ -539,13 +548,13 @@ public TupleHandlingContext(HttpTransactionContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { - return getRootInstrumentationScope().tupleHandlingContext; + return getRootInstrumentationScope().tupleHandlingInstruments; } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 5c6916047..30cc6dcf9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -1,19 +1,15 @@ package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.DoubleHistogram; -import io.opentelemetry.api.metrics.LongUpDownCounter; -import io.opentelemetry.api.metrics.MeterProvider; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; -import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.tracing.RootOtelContext; import lombok.Getter; @Getter public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { + public static final String SCOPE_NAME = "replayer"; + public final KafkaConsumerContexts.AsyncListeningContext.MetricInstruments asyncListeningInstruments; public final KafkaConsumerContexts.TouchScopeContext.MetricInstruments touchInstruments; public final KafkaConsumerContexts.PollScopeContext.MetricInstruments pollInstruments; @@ -24,52 +20,47 @@ public class RootReplayerContext extends RootOtelContext implements IRootReplaye public final TrafficSourceContexts.BackPressureBlockContext.MetricInstruments backPressureInstruments; public final TrafficSourceContexts.WaitForNextSignal.MetricInstruments waitForNextSignalInstruments; - public final ReplayContexts.ChannelKeyContext.MetricInstruments channelKeyContext; - public final ReplayContexts.KafkaRecordContext.MetricInstruments kafkaRecordContext; - public final ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments trafficStreamLifecycleContext; - public final ReplayContexts.HttpTransactionContext.MetricInstruments httpTransactionContext; - public 
final ReplayContexts.RequestAccumulationContext.MetricInstruments requestAccumContext; - public final ReplayContexts.ResponseAccumulationContext.MetricInstruments responseAccumContext; - public final ReplayContexts.RequestTransformationContext.MetricInstruments transformationContext; - public final ReplayContexts.ScheduledContext.MetricInstruments scheduledContext; - public final ReplayContexts.TargetRequestContext.MetricInstruments targetRequestContext; - public final ReplayContexts.RequestSendingContext.MetricInstruments requestSendingContext; - public final ReplayContexts.WaitingForHttpResponseContext.MetricInstruments waitingForHttpResponseContext; - public final ReplayContexts.ReceivingHttpResponseContext.MetricInstruments receivingHttpContext; - public final ReplayContexts.TupleHandlingContext.MetricInstruments tupleHandlingContext; - - public final InputStreamOfTraffic.IOSTrafficStreamContext.MetricInstruments directInputStreamContext; + public final ReplayContexts.ChannelKeyContext.MetricInstruments channelKeyInstruments; + public final ReplayContexts.KafkaRecordContext.MetricInstruments kafkaRecordInstruments; + public final ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments trafficStreamLifecycleInstruments; + public final ReplayContexts.HttpTransactionContext.MetricInstruments httpTransactionInstruments; + public final ReplayContexts.RequestAccumulationContext.MetricInstruments requestAccumInstruments; + public final ReplayContexts.ResponseAccumulationContext.MetricInstruments responseAccumInstruments; + public final ReplayContexts.RequestTransformationContext.MetricInstruments transformationInstruments; + public final ReplayContexts.ScheduledContext.MetricInstruments scheduledInstruments; + public final ReplayContexts.TargetRequestContext.MetricInstruments targetRequestInstruments; + public final ReplayContexts.RequestSendingContext.MetricInstruments requestSendingInstruments; + public final ReplayContexts.WaitingForHttpResponseContext.MetricInstruments waitingForHttpResponseInstruments; + public final ReplayContexts.ReceivingHttpResponseContext.MetricInstruments receivingHttpInstruments; + public final ReplayContexts.TupleHandlingContext.MetricInstruments tupleHandlingInstruments; public RootReplayerContext(OpenTelemetry sdk) { - super(sdk); - var meterProvider = this.getMeterProvider(); - - asyncListeningInstruments = new KafkaConsumerContexts.AsyncListeningContext.MetricInstruments(meterProvider); - touchInstruments = new KafkaConsumerContexts.TouchScopeContext.MetricInstruments(meterProvider); - pollInstruments = new KafkaConsumerContexts.PollScopeContext.MetricInstruments(meterProvider); - commitInstruments = new KafkaConsumerContexts.CommitScopeContext.MetricInstruments(meterProvider); - kafkaCommitInstruments = new KafkaConsumerContexts.KafkaCommitScopeContext.MetricInstruments(meterProvider); - - directInputStreamContext = new InputStreamOfTraffic.IOSTrafficStreamContext.MetricInstruments(meterProvider); + super(SCOPE_NAME, sdk); + var meter = this.getMeterProvider().get(SCOPE_NAME); - readChunkInstruments = new TrafficSourceContexts.ReadChunkContext.MetricInstruments(meterProvider); - backPressureInstruments = new TrafficSourceContexts.BackPressureBlockContext.MetricInstruments(meterProvider); - waitForNextSignalInstruments = new TrafficSourceContexts.WaitForNextSignal.MetricInstruments(meterProvider); + asyncListeningInstruments = new KafkaConsumerContexts.AsyncListeningContext.MetricInstruments(meter); + touchInstruments = new 
KafkaConsumerContexts.TouchScopeContext.MetricInstruments(meter); + pollInstruments = new KafkaConsumerContexts.PollScopeContext.MetricInstruments(meter); + commitInstruments = new KafkaConsumerContexts.CommitScopeContext.MetricInstruments(meter); + kafkaCommitInstruments = new KafkaConsumerContexts.KafkaCommitScopeContext.MetricInstruments(meter); + readChunkInstruments = new TrafficSourceContexts.ReadChunkContext.MetricInstruments(meter); + backPressureInstruments = new TrafficSourceContexts.BackPressureBlockContext.MetricInstruments(meter); + waitForNextSignalInstruments = new TrafficSourceContexts.WaitForNextSignal.MetricInstruments(meter); - channelKeyContext = new ReplayContexts.ChannelKeyContext.MetricInstruments(meterProvider); - kafkaRecordContext = new ReplayContexts.KafkaRecordContext.MetricInstruments(meterProvider); - trafficStreamLifecycleContext = new ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments(meterProvider); - httpTransactionContext = new ReplayContexts.HttpTransactionContext.MetricInstruments(meterProvider); - requestAccumContext = new ReplayContexts.RequestAccumulationContext.MetricInstruments(meterProvider); - responseAccumContext = new ReplayContexts.ResponseAccumulationContext.MetricInstruments(meterProvider); - transformationContext = new ReplayContexts.RequestTransformationContext.MetricInstruments(meterProvider); - scheduledContext = new ReplayContexts.ScheduledContext.MetricInstruments(meterProvider); - targetRequestContext = new ReplayContexts.TargetRequestContext.MetricInstruments(meterProvider); - requestSendingContext = new ReplayContexts.RequestSendingContext.MetricInstruments(meterProvider); - waitingForHttpResponseContext = new ReplayContexts.WaitingForHttpResponseContext.MetricInstruments(meterProvider); - receivingHttpContext = new ReplayContexts.ReceivingHttpResponseContext.MetricInstruments(meterProvider); - tupleHandlingContext = new ReplayContexts.TupleHandlingContext.MetricInstruments(meterProvider); + channelKeyInstruments = new ReplayContexts.ChannelKeyContext.MetricInstruments(meter); + kafkaRecordInstruments = new ReplayContexts.KafkaRecordContext.MetricInstruments(meter); + trafficStreamLifecycleInstruments = new ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments(meter); + httpTransactionInstruments = new ReplayContexts.HttpTransactionContext.MetricInstruments(meter); + requestAccumInstruments = new ReplayContexts.RequestAccumulationContext.MetricInstruments(meter); + responseAccumInstruments = new ReplayContexts.ResponseAccumulationContext.MetricInstruments(meter); + transformationInstruments = new ReplayContexts.RequestTransformationContext.MetricInstruments(meter); + scheduledInstruments = new ReplayContexts.ScheduledContext.MetricInstruments(meter); + targetRequestInstruments = new ReplayContexts.TargetRequestContext.MetricInstruments(meter); + requestSendingInstruments = new ReplayContexts.RequestSendingContext.MetricInstruments(meter); + waitingForHttpResponseInstruments = new ReplayContexts.WaitingForHttpResponseContext.MetricInstruments(meter); + receivingHttpInstruments = new ReplayContexts.ReceivingHttpResponseContext.MetricInstruments(meter); + tupleHandlingInstruments = new ReplayContexts.TupleHandlingContext.MetricInstruments(meter); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index 
15e269c72..d5e9c5f7b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; @@ -31,8 +32,8 @@ public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { @@ -66,8 +67,8 @@ public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { @@ -85,8 +86,8 @@ public static class WaitForNextSignal extends BaseNestedSpanContext implements ITrafficSourceContexts.IWaitForNextSignal { public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME, ACTIVITY_NAME); + public MetricInstruments(Meter meter) { + super(meter, ACTIVITY_NAME); } } public @NonNull MetricInstruments getMetrics() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 097d742b5..6e0e4f919 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -166,10 +166,8 @@ private Void blockIfNeeded(ITrafficSourceContexts.IReadChunkContext readContext) } @Override - public CommitResult commitTrafficStream(Function contextFactory, - ITrafficStreamKey trafficStreamKey) throws IOException { - var commitResult = underlyingSource.commitTrafficStream(contextFactory, trafficStreamKey); + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { + var commitResult = underlyingSource.commitTrafficStream(trafficStreamKey); if (commitResult == CommitResult.AfterNextRead) { readGate.drainPermits(); readGate.release(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java index ebc8c4da3..cd9d6426f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java @@ -22,9 +22,7 @@ enum CommitResult { CompletableFuture> 
readNextTrafficStreamChunk(Supplier contextSupplier); - CommitResult commitTrafficStream(Function contextFactory, - ITrafficStreamKey trafficStreamKey) throws IOException; + CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException; default void close() throws IOException {} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 59146db1d..072b5b263 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -1,30 +1,20 @@ package org.opensearch.migrations.replay.traffic.source; -import io.opentelemetry.api.metrics.LongUpDownCounter; -import io.opentelemetry.api.metrics.MeterProvider; -import lombok.Getter; import lombok.Lombok; -import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; -import java.time.Instant; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; @@ -42,42 +32,9 @@ public InputStreamOfTraffic(RootReplayerContext context, InputStream inputStream this.inputStream = inputStream; } - public static class IOSTrafficStreamContext - extends DirectNestedSpanContext - implements IReplayContexts.ITrafficStreamsLifecycleContext { - @Getter private final ITrafficStreamKey trafficStreamKey; - - public IOSTrafficStreamContext(@NonNull ReplayContexts.ChannelKeyContext ctx, - ITrafficStreamKey tsk) { - super(ctx); - this.trafficStreamKey = tsk; - initializeSpan(); - } - - public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(MeterProvider meterProvider) { - super(meterProvider, SCOPE_NAME2, ACTIVITY_NAME); - var meter = meterProvider.get(SCOPE_NAME2); - } - } - - public @NonNull ReplayContexts.ChannelKeyContext.MetricInstruments getMetrics() { - return getRootInstrumentationScope().channelKeyContext; - } - - @Override - public ReplayContexts.HttpTransactionContext createHttpTransactionContext(UniqueReplayerRequestKey requestKey, - Instant sourceTimestamp) { - return new ReplayContexts.HttpTransactionContext(getRootInstrumentationScope(), - this, requestKey, sourceTimestamp); - } - - 
@Override - public String getActivityName() { return "trafficStreamLifecycle"; } - - @Override - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { - return getImmediateEnclosingScope(); + public static final class IOSTrafficStreamContext extends ReplayContexts.TrafficStreamsLifecycleContext { + private IOSTrafficStreamContext(ReplayContexts.ChannelKeyContext enclosingScope, ITrafficStreamKey trafficStreamKey) { + super(enclosingScope, trafficStreamKey); } } @@ -113,9 +70,7 @@ public IReplayContexts.IChannelKeyContext getChannelKeyContext() { } @Override - public CommitResult commitTrafficStream(Function ctx, - ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { // do nothing - this datasource isn't transactional return CommitResult.Immediate; } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index 30460b6a9..ff770bfdc 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -3,6 +3,7 @@ import java.time.Instant; import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; @@ -15,17 +16,17 @@ public class TestRequestKey { private TestRequestKey() {} public static final ReplayContexts.HttpTransactionContext - getTestConnectionRequestContext(IInstrumentationAttributes ctx, int replayerIdx) { + getTestConnectionRequestContext(RootReplayerContext ctx, int replayerIdx) { return getTestConnectionRequestContext(ctx, DEFAULT_TEST_CONNECTION, replayerIdx); } public static ReplayContexts.HttpTransactionContext - getTestConnectionRequestContext(IInstrumentationAttributes ctx, String connectionId, int replayerIdx) { + getTestConnectionRequestContext(RootReplayerContext ctx, String connectionId, int replayerIdx) { var rk = new UniqueReplayerRequestKey( PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, tsk -> new TestTrafficStreamsLifecycleContext(ctx, tsk)), 0, replayerIdx); - return new ReplayContexts.HttpTransactionContext(rk.trafficStreamKey.getTrafficStreamsContext(), rk, - Instant.EPOCH); + return new ReplayContexts.HttpTransactionContext(ctx, rk.trafficStreamKey.getTrafficStreamsContext(), + rk, Instant.EPOCH); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index 0e4b4e207..b13c17d85 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -1,40 +1,22 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; +import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -class TestTrafficStreamsLifecycleContext - extends DirectNestedSpanContext - implements IReplayContexts.ITrafficStreamsLifecycleContext { +import java.time.Instant; +class TestTrafficStreamsLifecycleContext extends ReplayContexts.TrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; - public TestTrafficStreamsLifecycleContext(IInstrumentationAttributes rootContext, ITrafficStreamKey tsk) { - super(new ReplayContexts.ChannelKeyContext(rootContext, tsk)); + public TestTrafficStreamsLifecycleContext(RootReplayerContext rootContext, ITrafficStreamKey tsk) { + super(new ReplayContexts.ChannelKeyContext(rootContext, rootContext, tsk), tsk, rootContext); this.trafficStreamKey = tsk; initializeSpan(); } - - public static final String SCOPE_NAME = "testScope"; - @Override - public String getActivityName() { return "testTrafficSpan"; } - - @Override - public IReplayContexts.IChannelKeyContext getChannelKeyContext() { - return getLogicalEnclosingScope(); - } - - @Override - public ITrafficStreamKey getTrafficStreamKey() { - return trafficStreamKey; - } - - @Override - public void close() { - super.close(); - getLogicalEnclosingScope().close(); - } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index 077a6b03e..b6f56e1c5 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -13,6 +13,7 @@ import org.junit.jupiter.api.Assertions; import org.opensearch.migrations.replay.datahandlers.IPacketConsumer; import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.transform.IAuthTransformerFactory; @@ -121,7 +122,7 @@ private static String getStringFromContent(FullHttpRequest fullRequest) throws I return baos.toString(StandardCharsets.UTF_8); } } - static void runPipelineAndValidate(IInstrumentationAttributes rootContext, + static void runPipelineAndValidate(RootReplayerContext rootContext, IAuthTransformerFactory authTransformer, String extraHeaders, List stringParts, @@ -132,7 +133,7 @@ static void runPipelineAndValidate(IInstrumentationAttributes rootContext, authTransformer, extraHeaders, stringParts, expectedRequestHeaders, expectedOutputGenerator); } - static void runPipelineAndValidate(IInstrumentationAttributes rootContext, + static void runPipelineAndValidate(RootReplayerContext rootContext, IJsonTransformer transformer, IAuthTransformerFactory authTransformer, String extraHeaders, diff --git 
a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java new file mode 100644 index 000000000..43555e813 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -0,0 +1,24 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; + +public class TestContext extends RootReplayerContext { + + private final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; + + public static TestContext withTracking() { + return new TestContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), + InMemoryMetricExporter.create())); + } + + public static TestContext noTracking() { + return new TestContext(new InMemoryInstrumentationBundle(null, null)); + } + + public TestContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) { + super(inMemoryInstrumentationBundle.openTelemetrySdk); + this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; + } +} From 9601a68d98532da33ea6b8838090510e74f6464a Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 12 Jan 2024 18:49:54 -0500 Subject: [PATCH 57/94] More refactoring, still doesn't all compile, but most of it does (top-level Capture proxy is remaining). I need to stitch together a couple root context classes to provide disparate top-level contexts. Signed-off-by: Greg Schohn --- .../captureKafkaOffloader/build.gradle | 1 + .../kafkaoffloader/KafkaCaptureFactory.java | 22 +-- .../tracing/KafkaRecordContext.java | 3 - .../KafkaCaptureFactoryTest.java | 35 ++-- .../TestRootKafkaOffloaderContext.java | 44 +++++ .../FileConnectionCaptureFactory.java | 3 +- .../IConnectionCaptureFactory.java | 6 +- .../tracing/ConnectionContext.java | 10 +- .../tracing/IRootOffloaderContext.java | 23 +++ .../tracing/RootOffloaderContext.java | 22 --- .../InMemoryConnectionCaptureFactory.java | 4 +- .../tracing/BaseNestedSpanContext.java | 3 + .../tracing/IInstrumentationAttributes.java | 37 ++-- .../IScopedInstrumentationAttributes.java | 4 +- .../migrations/tracing/RootOtelContext.java | 7 +- .../IHttpTransactionContext.java | 2 - ...nditionallyReliableLoggingHttpHandler.java | 26 ++- .../netty/LoggingHttpHandler.java | 58 +++--- .../tracing/IRootWireLoggingContext.java | 12 +- .../netty/tracing/IWireCaptureContexts.java | 78 ++++++++ .../netty/tracing/RootWireLoggingContext.java | 24 ++- .../netty/tracing/WireCaptureContexts.java | 184 +++++++++++++++--- ...ionallyReliableLoggingHttpHandlerTest.java | 8 +- .../datatypes/PojoTrafficStreamKey.java | 2 +- .../PojoTrafficStreamKeyAndContext.java | 14 +- .../kafka/KafkaTrafficCaptureSource.java | 3 +- .../replay/tracing/ChannelContextManager.java | 13 +- .../replay/tracing/IReplayContexts.java | 13 +- .../replay/tracing/KafkaConsumerContexts.java | 16 +- .../replay/tracing/ReplayContexts.java | 84 ++++---- .../replay/tracing/RootReplayerContext.java | 16 +- .../traffic/source/BlockingTrafficSource.java | 4 +- .../traffic/source/ITrafficCaptureSource.java | 2 - .../traffic/source/InputStreamOfTraffic.java | 13 +- .../replay/BlockingTrafficSourceTest.java | 20 +- .../CompressedFileTrafficCaptureSource.java | 14 +- .../replay/FullTrafficReplayerTest.java | 16 +- 
.../KafkaRestartingTrafficReplayerTest.java | 7 +- .../replay/SentinelSensingTrafficSource.java | 12 +- ...afficToHttpTransactionAccumulatorTest.java | 17 +- .../replay/TrafficReplayerTest.java | 5 +- .../replay/TrafficStreamGenerator.java | 3 +- .../replay/V0_1TrafficCaptureSource.java | 3 +- .../KafkaCommitsWorkBetweenLongPolls.java | 13 +- .../replay/kafka/KafkaKeepAliveTests.java | 9 +- ...KafkaTrafficCaptureSourceLongTermTest.java | 5 +- .../kafka/KafkaTrafficCaptureSourceTest.java | 12 +- .../TestTrafficStreamsLifecycleContext.java | 9 +- .../migrations/tracing/TestContext.java | 2 +- 49 files changed, 615 insertions(+), 328 deletions(-) create mode 100644 TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java create mode 100644 TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java delete mode 100644 TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/RootOffloaderContext.java create mode 100644 TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index 38005eab0..e3ec41298 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -23,6 +23,7 @@ dependencies { testImplementation project(':captureProtobufs') testImplementation testFixtures(project(path: ':coreUtilities')) + testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 44478a7d1..b703c21be 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -61,9 +61,9 @@ public KafkaCaptureFactory(IRootKafkaOffloaderContext rootScope, String nodeId, @Override public IChannelConnectionCaptureSerializer - createOffloader(IConnectionContext ctx, String connectionId) { - return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, - new StreamManager(rootScope, ctx, connectionId)); + createOffloader(IConnectionContext ctx) { + return new StreamChannelConnectionCaptureSerializer<>(nodeId, ctx.getConnectionId(), + new StreamManager(rootScope, ctx)); } @AllArgsConstructor @@ -79,14 +79,12 @@ static class CodedOutputStreamWrapper implements CodedOutputStreamHolder { class StreamManager extends OrderedStreamLifecyleManager { IConnectionContext telemetryContext; IRootKafkaOffloaderContext rootScope; - String connectionId; Instant startTime; - public StreamManager(IRootKafkaOffloaderContext rootScope, IConnectionContext ctx, String connectionId) { + public 
StreamManager(IRootKafkaOffloaderContext rootScope, IConnectionContext ctx) { // TODO - add https://opentelemetry.io/blog/2022/instrument-kafka-clients/ this.rootScope = rootScope; this.telemetryContext = ctx; - this.connectionId = connectionId; this.startTime = Instant.now(); } @@ -113,7 +111,7 @@ public CodedOutputStreamWrapper createStream() { } var osh = (CodedOutputStreamWrapper) outputStreamHolder; - // Structured context for MetricsLogger + final var connectionId = telemetryContext.getConnectionId(); try { String recordId = String.format("%s.%d", connectionId, index); var byteBuffer = osh.byteBuffer; @@ -160,24 +158,16 @@ private Callback handleProducerRecordSent(CompletableFuture cf, // that field out of scope. return (metadata, exception) -> { log.atInfo().setMessage(()->"kafka completed sending a record").log(); - - flushContext.meterHistogramMicros(exception==null ? "stream_flush_success_ms" : "stream_flush_failure_ms"); - flushContext.meterIncrementEvent(exception==null ? "stream_flush_success" : "stream_flush_failure"); - flushContext.meterIncrementEvent( - exception==null ? "stream_flush_success_bytes" : "stream_flush_failure_bytes", - flushContext.getRecordSize()); - flushContext.close(); - if (exception != null) { flushContext.addException(exception); log.error("Error sending producer record: {}", recordId, exception); cf.completeExceptionally(exception); } else { - flushContext.onSuccessfulFlush(); log.debug("Kafka producer record: {} has finished sending for topic: {} and partition {}", recordId, metadata.topic(), metadata.partition()); cf.complete(metadata); } + flushContext.close(); }; } } diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index c6bb54b86..830c42c2d 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -10,7 +10,6 @@ import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; public class KafkaRecordContext extends BaseNestedSpanContext @@ -38,10 +37,8 @@ public KafkaRecordContext(IRootKafkaOffloaderContext rootScope, IConnectionConte } public static class MetricInstruments extends CommonScopedMetricInstruments { - private final LongCounter successCount; public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); - successCount = meter.counterBuilder() } } diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 52c93e635..7a8a899fb 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ 
b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -21,9 +21,8 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; +import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.TestRootKafkaOffloaderContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; -import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; -import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -54,18 +53,17 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, int maxAllowableMessageSize = 1024*1024; MockProducer producer = new MockProducer<>(true, new StringSerializer(), new ByteArraySerializer()); KafkaCaptureFactory kafkaCaptureFactory = - new KafkaCaptureFactory(TEST_NODE_ID_STRING, producer, maxAllowableMessageSize); - IChannelConnectionCaptureSerializer serializer = kafkaCaptureFactory.createOffloader(createCtx(), connectionId); - - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < 15000; i++) { - sb.append("{ \"create\": { \"_index\": \"office-index\" } }\n{ \"title\": \"Malone's Cones\", \"year\": 2013 }\n"); - } - Assertions.assertTrue(sb.toString().getBytes().length > 1024*1024); - byte[] fakeDataBytes = sb.toString().getBytes(StandardCharsets.UTF_8); + new KafkaCaptureFactory(TestRootKafkaOffloaderContext.noTracking(), + TEST_NODE_ID_STRING, producer, maxAllowableMessageSize); + var serializer = kafkaCaptureFactory.createOffloader(createCtx()); + + var testStr = "{ \"create\": { \"_index\": \"office-index\" } }\n{ \"title\": \"Malone's Cones\", \"year\": 2013 }\n" + .repeat(15000); + var fakeDataBytes = testStr.getBytes(StandardCharsets.UTF_8); + Assertions.assertTrue(fakeDataBytes.length > 1024*1024); var bb = Unpooled.wrappedBuffer(fakeDataBytes); serializer.addReadEvent(referenceTimestamp, bb); - CompletableFuture future = serializer.flushCommitAndResetStream(true); + var future = serializer.flushCommitAndResetStream(true); future.get(); for (ProducerRecord record : producer.history()) { int recordSize = calculateRecordSize(record, null); @@ -78,7 +76,7 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, } private static ConnectionContext createCtx() { - return new ConnectionContext(new RootOffloaderContext(null), "test", "test"); + return new ConnectionContext(new TestRootKafkaOffloaderContext(null), "test", "test"); } /** @@ -104,8 +102,9 @@ private int calculateRecordSize(ProducerRecord record, String re @Test public void testLinearOffloadingIsSuccessful() throws IOException { KafkaCaptureFactory kafkaCaptureFactory = - new KafkaCaptureFactory(TEST_NODE_ID_STRING, mockProducer, 1024*1024); - IChannelConnectionCaptureSerializer offloader = kafkaCaptureFactory.createOffloader(createCtx(), connectionId); + new KafkaCaptureFactory(TestRootKafkaOffloaderContext.noTracking(), + TEST_NODE_ID_STRING, mockProducer, 1024*1024); + var offloader = kafkaCaptureFactory.createOffloader(createCtx()); List recordSentCallbacks = new ArrayList<>(3); when(mockProducer.send(any(), any())).thenAnswer(invocation -> { @@ -120,11 +119,11 @@ public void testLinearOffloadingIsSuccessful() throws IOException { byte[] fakeDataBytes = 
"FakeData".getBytes(StandardCharsets.UTF_8); var bb = Unpooled.wrappedBuffer(fakeDataBytes); offloader.addReadEvent(ts, bb); - CompletableFuture cf1 = offloader.flushCommitAndResetStream(false); + var cf1 = offloader.flushCommitAndResetStream(false); offloader.addReadEvent(ts, bb); - CompletableFuture cf2 = offloader.flushCommitAndResetStream(false); + var cf2 = offloader.flushCommitAndResetStream(false); offloader.addReadEvent(ts, bb); - CompletableFuture cf3 = offloader.flushCommitAndResetStream(false); + var cf3 = offloader.flushCommitAndResetStream(false); bb.release(); Assertions.assertEquals(false, cf1.isDone()); diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java new file mode 100644 index 000000000..bab196e0b --- /dev/null +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java @@ -0,0 +1,44 @@ +package org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import lombok.Getter; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; +import org.opensearch.migrations.tracing.RootOtelContext; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; + +public class TestRootKafkaOffloaderContext extends RootOtelContext implements IRootOffloaderContext, IRootKafkaOffloaderContext { + @Getter + public final KafkaRecordContext.MetricInstruments kafkaOffloadingInstruments; + @Getter + public final ConnectionContext.MetricInstruments connectionInstruments; + + private final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; + + public static TestRootKafkaOffloaderContext withTracking() { + return new TestRootKafkaOffloaderContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), + InMemoryMetricExporter.create())); + } + + public static TestRootKafkaOffloaderContext noTracking() { + return new TestRootKafkaOffloaderContext(new InMemoryInstrumentationBundle(null, null)); + } + + public TestRootKafkaOffloaderContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) { + super("tests", inMemoryInstrumentationBundle.openTelemetrySdk); + this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; + final var meter = getMeterProvider().get("test"); + this.kafkaOffloadingInstruments = new KafkaRecordContext.MetricInstruments(meter); + this.connectionInstruments = new ConnectionContext.MetricInstruments(meter); + } + + @Override + public Span buildSpan(IInstrumentationAttributes enclosingScope, String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { + return null; + } +} diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java 
b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java index a566b9e9c..cf999108e 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java @@ -85,7 +85,8 @@ public CodedOutputStreamAndByteBufferWrapper createStream() { } @Override - public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) { + public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx) { + final var connectionId = ctx.getConnectionId(); return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager(connectionId)); } } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java index 34539ce3e..f6d417c05 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/IConnectionCaptureFactory.java @@ -1,13 +1,9 @@ package org.opensearch.migrations.trafficcapture; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; -import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; import java.io.IOException; public interface IConnectionCaptureFactory { - IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, - String connectionId) throws IOException; + IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx) throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index c3f6266ea..fceb09a3d 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -15,8 +15,8 @@ import org.opensearch.migrations.tracing.IHasRootInstrumentationScope; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -public class ConnectionContext extends BaseNestedSpanContext - implements IConnectionContext, IHasRootInstrumentationScope { +public class ConnectionContext extends BaseNestedSpanContext + implements IConnectionContext, IHasRootInstrumentationScope { private static final AttributeNameMatchingPredicate KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT = AttributeNameMatchingPredicate.builder(true).add(CONNECTION_ID_ATTR.getKey()).build(); @@ -31,7 +31,7 @@ public class ConnectionContext extends BaseNestedSpanContext kickoffCloseStream(CodedOutputStreamHolder out } @Override - public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx, String connectionId) throws IOException { + public IChannelConnectionCaptureSerializer createOffloader(IConnectionContext ctx) throws 
IOException { // This array is only an indirection to work around Java's constraint that lambda values are final - return new StreamChannelConnectionCaptureSerializer<>(nodeId, connectionId, new StreamManager()); + return new StreamChannelConnectionCaptureSerializer<>(nodeId, ctx.getConnectionId(), new StreamManager()); } public Stream getRecordedTrafficStreamsStream() { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index c2a555ef4..a3542963c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -5,6 +5,7 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; import lombok.NonNull; +import lombok.Setter; import java.time.Instant; @@ -15,6 +16,8 @@ public abstract class BaseNestedSpanContext @Getter final Instant startTime; @Getter private Span currentSpan; @Getter private final S rootInstrumentationScope; + @Getter @Setter + Exception observedExceptionToIncludeInMetrics; protected BaseNestedSpanContext(S rootScope, T enclosingScope) { this.enclosingScope = enclosingScope; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index fccff732e..b9cfbc2c0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -1,15 +1,17 @@ package org.opensearch.migrations.tracing; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; -import lombok.NonNull; import java.util.ArrayList; public interface IInstrumentationAttributes { + AttributeKey HAD_EXCEPTION_KEY = AttributeKey.booleanKey("hadException"); + IInstrumentationAttributes getEnclosingScope(); default Span getCurrentSpan() { return null; } @@ -17,11 +19,23 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; } - default Attributes getPopulatedAttributes(AttributesBuilder builder) { - return getPopulatedAttributesBuilder(builder).build(); + Exception getObservedExceptionToIncludeInMetrics(); + void setObservedExceptionToIncludeInMetrics(Exception e); + + default Attributes getPopulatedMetricAttributes() { + final var e = getObservedExceptionToIncludeInMetrics(); + return e == null ? 
null : Attributes.builder().put(HAD_EXCEPTION_KEY, true).build(); } - default AttributesBuilder getPopulatedAttributesBuilder(AttributesBuilder builder) { + default Attributes getPopulatedSpanAttributes() { + return getPopulatedSpanAttributes(Attributes.builder()); + } + + default Attributes getPopulatedSpanAttributes(AttributesBuilder builder) { + return getPopulatedSpanAttributesBuilder(builder).build(); + } + + default AttributesBuilder getPopulatedSpanAttributesBuilder(AttributesBuilder builder) { var currentObj = this; var stack = new ArrayList(); while (currentObj != null) { @@ -36,25 +50,16 @@ default AttributesBuilder getPopulatedAttributesBuilder(AttributesBuilder builde } default void meterIncrementEvent(LongCounter c) { - meterIncrementEvent(c, Attributes.builder()); - } - default void meterIncrementEvent(LongCounter c, AttributesBuilder attributesBuilder) { - meterIncrementEvent(c, 1, attributesBuilder); + meterIncrementEvent(c, 1); } default void meterIncrementEvent(LongCounter c, long increment) { - meterIncrementEvent (c, increment, Attributes.builder()); - } - default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - c.add(increment); + c.add(increment, getPopulatedMetricAttributes()); } } default void meterDeltaEvent(LongUpDownCounter c, long delta) { - meterDeltaEvent(c, delta, Attributes.builder()); - } - default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - c.add(delta); + c.add(delta, getPopulatedMetricAttributes()); } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 5a087944a..6e49e0aaa 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -33,12 +33,12 @@ default void close() { sendMeterEventsForEnd(); } - default void addException(Exception e) { + default void addException(Throwable e) { getCurrentSpan().recordException(e); sendMeterEventsForException(e); } - default void sendMeterEventsForException(Exception e) { + default void sendMeterEventsForException(Throwable e) { meterIncrementEvent(getMetrics().exceptionCounter); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 6be44ffd1..4d5b05a55 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -19,7 +19,9 @@ import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import lombok.Getter; import lombok.NonNull; +import lombok.Setter; import java.time.Duration; import java.util.Optional; @@ -28,6 +30,9 @@ public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; private final String scopeName; + 
@Getter + @Setter + Exception observedExceptionToIncludeInMetrics; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @NonNull String serviceName) { @@ -125,6 +130,6 @@ public Span buildSpan(IInstrumentationAttributes enclosingScope, String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); - return buildSpanWithParent(spanBuilder, getPopulatedAttributes(attributesBuilder), parentSpan, linkedSpan); + return buildSpanWithParent(spanBuilder, getPopulatedSpanAttributes(attributesBuilder), parentSpan, linkedSpan); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index 2a4f3c495..bebcee137 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -2,8 +2,6 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.IRootOtelContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public interface IHttpTransactionContext extends IScopedInstrumentationAttributes { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index 79a0596bc..bd4828602 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -8,6 +8,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.IWireCaptureContexts; import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; import java.io.IOException; @@ -32,31 +33,28 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob boolean shouldCapture, HttpRequest httpRequest) throws Exception { if (shouldCapture && shouldBlockPredicate.test(httpRequest)) { - rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.INTERNALLY_BLOCKED); + ((IWireCaptureContexts.IRequestContext)messageContext).onBlockingRequest(); + messageContext = messageContext.createBlockingContext(); trafficOffloader.flushCommitAndResetStream(false).whenComplete((result, t) -> { log.atInfo().setMessage(()->"Done flushing").log(); - messageContext.meterIncrementEvent(t != null ? "blockedFlushFailure" : "blockedFlushSuccess"); - messageContext.meterHistogramMicros( - t==null ? 
"blockedFlushFailure_micro" : "stream_flush_failure_micro"); - messageContext.endSpan(); // TODO - make this meter on create/close if (t != null) { // This is a spot where we would benefit from having a behavioral policy that different users // could set as needed. Some users may be fine with just logging a failed offloading of a request // where other users may want to stop entirely. JIRA here: https://opensearch.atlassian.net/browse/MIGRATIONS-1276 - log.atWarn().setCause(t).setMessage("Dropping request - Got error").log(); + log.atWarn().setCause(t) + .setMessage("Error offloading the request, but forwarding it to the service anyway").log(); ReferenceCountUtil.release(msg); - } else { - try { - super.channelFinishedReadingAnHttpMessage(ctx, msg, shouldCapture, httpRequest); - } catch (Exception e) { - throw Lombok.sneakyThrow(e); - } + messageContext.addException(t); + } + try { + super.channelFinishedReadingAnHttpMessage(ctx, msg, shouldCapture, httpRequest); + } catch (Exception e) { + throw Lombok.sneakyThrow(e); } }); } else { - messageContext.meterIncrementEvent("nonBlockingRequest"); - // TODO - log capturing vs non-capturing too + assert messageContext instanceof IWireCaptureContexts.IRequestContext; super.channelFinishedReadingAnHttpMessage(ctx, msg, shouldCapture, httpRequest); } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index 6d4dda0a3..d788e2a3f 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -25,6 +25,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.IWireCaptureContexts; import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; @@ -135,18 +136,16 @@ public HttpRequest resetCurrentRequest() { protected final EmbeddedChannel httpDecoderChannel; - protected WireCaptureContexts.HttpMessageContext messageContext; + protected IWireCaptureContexts.IHttpMessageContext messageContext; public LoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, String nodeId, String channelKey, @NonNull IConnectionCaptureFactory trafficOffloaderFactory, @NonNull RequestCapturePredicate httpHeadersCapturePredicate) throws IOException { - var parentContext = new ConnectionContext(rootContext, channelKey, nodeId); + var parentContext = rootContext.createConnectionContext(channelKey, nodeId); + this.messageContext = parentContext.createInitialRequestContext(); - this.messageContext = new WireCaptureContexts.HttpMessageContext(parentContext, 0, WireCaptureContexts.HttpMessageContext.HttpTransactionState.REQUEST); - messageContext.meterIncrementEvent("requestStarted"); - - this.trafficOffloader = trafficOffloaderFactory.createOffloader(rootContext, parentContext, channelKey); + this.trafficOffloader = trafficOffloaderFactory.createOffloader(parentContext); var captureState = new CaptureState(); httpDecoderChannel = new EmbeddedChannel( new 
SimpleHttpRequestDecoder(httpHeadersCapturePredicate.getHeadersRequiredForMatcher(), captureState), @@ -154,11 +153,8 @@ public LoggingHttpHandler(@NonNull IRootWireLoggingContext rootContext, String n ); } - protected void rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState nextState) { - messageContext = new WireCaptureContexts.HttpMessageContext(messageContext.getLogicalEnclosingScope(), - (nextState== WireCaptureContexts.HttpMessageContext.HttpTransactionState.REQUEST ? 1 : 0) - + messageContext.getSourceRequestIndex(), - nextState); + private IWireCaptureContexts.ICapturingConnectionContext getConnectionContext() { + return messageContext.getLogicalEnclosingScope(); } private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @@ -168,7 +164,7 @@ private SimpleDecodedHttpRequestHandler getHandlerThatHoldsParsedHttpRequest() { @Override public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { trafficOffloader.addCloseEvent(Instant.now()); - messageContext.meterIncrementEvent("unregistered"); + getConnectionContext().onUnregistered(); trafficOffloader.flushCommitAndResetStream(true).whenComplete((result, t) -> { if (t != null) { log.warn("Got error: " + t.getMessage()); @@ -185,7 +181,7 @@ public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { @Override public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { - messageContext.meterIncrementEvent("handlerRemoved"); + getConnectionContext().onRemoved(); messageContext.close(); messageContext.getLogicalEnclosingScope().close(); @@ -204,9 +200,9 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, boolean shouldCapture, HttpRequest httpRequest) throws Exception { - rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.WAITING); + assert messageContext instanceof IWireCaptureContexts.IRequestContext; + messageContext = messageContext.createWaitingForResponseContext(); super.channelRead(ctx, msg); - messageContext.meterIncrementEvent("requestReceived"); metricsLogger.atSuccess(MetricsEvent.RECEIVED_FULL_HTTP_REQUEST) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()) @@ -216,36 +212,36 @@ protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Ob @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - if (messageContext.getState() == WireCaptureContexts.HttpMessageContext.HttpTransactionState.RESPONSE) { - messageContext.endSpan(); // TODO - make this meter on create/close - rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.REQUEST); + IWireCaptureContexts.IRequestContext requestContext; + if (!(messageContext instanceof IWireCaptureContexts.IRequestContext)) { + messageContext = requestContext = messageContext.createNextRequestContext(); + } else { + requestContext = (IWireCaptureContexts.IRequestContext) messageContext; } + var timestamp = Instant.now(); var requestParsingHandler = getHandlerThatHoldsParsedHttpRequest(); var bb = ((ByteBuf) msg); httpDecoderChannel.writeInbound(bb.retainedDuplicate()); // the ByteBuf is consumed/release by this method - messageContext.meterIncrementEvent(getHandlerThatHoldsParsedHttpRequest().haveParsedFullRequest - ? 
"requestFullyParsed" : "requestPartiallyParsed"); - var captureState = requestParsingHandler.captureState; var shouldCapture = captureState.shouldCapture(); if (shouldCapture) { captureState.liveReadObservationsInOffloader = true; trafficOffloader.addReadEvent(timestamp, bb); } else if (captureState.liveReadObservationsInOffloader) { + requestContext.onCaptureSuppressed(); trafficOffloader.cancelCaptureForCurrentRequest(timestamp); captureState.liveReadObservationsInOffloader = false; } metricsLogger.atSuccess(MetricsEvent.RECEIVED_REQUEST_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); - messageContext.meterIncrementEvent("read"); - messageContext.meterIncrementEvent("readBytes", bb.readableBytes()); + requestContext.onBytesRead(bb.readableBytes()); if (requestParsingHandler.haveParsedFullRequest) { - messageContext.endSpan(); // TODO - make this meter on create/close + requestContext.onFullyParsedRequest(); var httpRequest = requestParsingHandler.resetCurrentRequest(); captureState.liveReadObservationsInOffloader = false; captureState.advanceStateModelIntoResponseGather(); @@ -267,18 +263,20 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { - if (messageContext.getState() != WireCaptureContexts.HttpMessageContext.HttpTransactionState.RESPONSE) { - messageContext.endSpan(); // TODO - make this meter on create/close - rotateNextMessageContext(WireCaptureContexts.HttpMessageContext.HttpTransactionState.RESPONSE); + IWireCaptureContexts.IResponseContext responseContext; + if (!(messageContext instanceof IWireCaptureContexts.IResponseContext)) { + messageContext = responseContext = messageContext.createResponseContext(); + } else { + responseContext = (IWireCaptureContexts.IResponseContext) messageContext; } + var bb = (ByteBuf) msg; if (getHandlerThatHoldsParsedHttpRequest().captureState.shouldCapture()) { trafficOffloader.addWriteEvent(Instant.now(), bb); } metricsLogger.atSuccess(MetricsEvent.RECEIVED_RESPONSE_COMPONENT) .setAttribute(MetricsAttributeKey.CHANNEL_ID, ctx.channel().id().asLongText()).emit(); - messageContext.meterIncrementEvent("write"); - messageContext.meterIncrementEvent("writeBytes", bb.readableBytes()); + responseContext.onBytesWritten(bb.readableBytes()); super.write(ctx, msg, promise); } @@ -286,7 +284,7 @@ public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { trafficOffloader.addExceptionCaughtEvent(Instant.now(), cause); - messageContext.meterIncrementEvent("exception"); + messageContext.addException(cause); httpDecoderChannel.close(); super.exceptionCaught(ctx, cause); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java index 01b314ea9..3be5b4d3e 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java @@ -2,11 +2,19 @@ import lombok.Getter; import org.opensearch.migrations.tracing.IRootOtelContext; +import 
org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; +import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; +import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; -public interface IRootWireLoggingContext extends IRootOtelContext { - WireCaptureContexts.RequestContext.MetricInstruments getHttpRequestInstruments(); +public interface IRootWireLoggingContext extends IRootOffloaderContext { + + WireCaptureContexts.ConnectionContext.MetricInstruments getConnectionInstruments(); + + WireCaptureContexts.RequestContext.MetricInstruments getRequestInstruments(); WireCaptureContexts.BlockingContext.MetricInstruments getBlockingInstruments(); WireCaptureContexts.WaitingForResponseContext.MetricInstruments getWaitingForResponseInstruments(); WireCaptureContexts.ResponseContext.MetricInstruments getResponseInstruments(); + IWireCaptureContexts.ICapturingConnectionContext createConnectionContext(String channelKey, String nodeId); + } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java new file mode 100644 index 000000000..9f718b045 --- /dev/null +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java @@ -0,0 +1,78 @@ +package org.opensearch.migrations.trafficcapture.netty.tracing; + +import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; +import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; +import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; + +public abstract class IWireCaptureContexts { + + public static class MetricNames { + public static final String UNREGISTERED = "unregistered"; + public static final String REMOVED = "removed"; + + public static final String BLOCKING_REQUEST = "blockingRequest"; + public static final String CAPTURE_SUPPRESSED = "captureSuppressed"; + public static final String FULL_REQUEST = "fullRequest"; + public static final String BYTES_READ = "bytesRead"; + public static final String BYTES_WRITTEN = "bytesWritten"; + } + + public interface ICapturingConnectionContext + extends org.opensearch.migrations.tracing.commoncontexts.IConnectionContext { + IHttpMessageContext createInitialRequestContext(); + + void onUnregistered(); + + void onRemoved(); + } + + public interface IHttpMessageContext + extends IHttpTransactionContext, + IWithStartTimeAndAttributes, + IWithTypedEnclosingScope + { + IBlockingContext createBlockingContext(); + IWaitingForResponseContext createWaitingForResponseContext(); + IResponseContext createResponseContext(); + IRequestContext createNextRequestContext(); + } + + public interface IRequestContext extends IHttpMessageContext + { + String ACTIVITY_NAME = "gatheringRequest"; + default String getActivityName() { + return ACTIVITY_NAME; + } + + void onBlockingRequest(); + + void onCaptureSuppressed(); + + void onFullyParsedRequest(); + + void onBytesRead(int size); + } + + public interface IBlockingContext extends IHttpMessageContext { + String ACTIVITY_NAME = "blocked"; + default String getActivityName() { + return ACTIVITY_NAME; + } + } + + public interface IWaitingForResponseContext extends IHttpMessageContext { + String ACTIVITY_NAME = "waitingForResponse"; + default String getActivityName() { + return ACTIVITY_NAME; + } + } + + public interface 
IResponseContext extends IHttpMessageContext { + String ACTIVITY_NAME = "gatheringResponse"; + default String getActivityName() { + return ACTIVITY_NAME; + } + + void onBytesWritten(int size); + } +} diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java index 74db849b8..cbfa15f04 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java @@ -2,26 +2,34 @@ import io.opentelemetry.api.OpenTelemetry; import lombok.Getter; -import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; +import org.opensearch.migrations.tracing.RootOtelContext; -public class RootWireLoggingContext extends RootOffloaderContext implements IRootWireLoggingContext { +@Getter +public class RootWireLoggingContext extends RootOtelContext implements IRootWireLoggingContext { public static final String SCOPE_NAME = "NettyCapture"; - @Getter public final WireCaptureContexts.RequestContext.MetricInstruments httpRequestInstruments; - @Getter public final WireCaptureContexts.BlockingContext.MetricInstruments blockingInstruments; - @Getter public final WireCaptureContexts.WaitingForResponseContext.MetricInstruments waitingForResponseInstruments; - @Getter public final WireCaptureContexts.ResponseContext.MetricInstruments responseInstruments; + public final WireCaptureContexts.ConnectionContext.MetricInstruments connectionInstruments; + public final WireCaptureContexts.RequestContext.MetricInstruments requestInstruments; + public final WireCaptureContexts.BlockingContext.MetricInstruments blockingInstruments; + public final WireCaptureContexts.WaitingForResponseContext.MetricInstruments waitingForResponseInstruments; + public final WireCaptureContexts.ResponseContext.MetricInstruments responseInstruments; public RootWireLoggingContext(OpenTelemetry openTelemetry) { this(openTelemetry, SCOPE_NAME); } public RootWireLoggingContext(OpenTelemetry openTelemetry, String scopeName) { - super(openTelemetry); + super(scopeName, openTelemetry); var meter = this.getMeterProvider().get(scopeName); - httpRequestInstruments = new WireCaptureContexts.RequestContext.MetricInstruments(meter); + connectionInstruments = new WireCaptureContexts.ConnectionContext.MetricInstruments(meter); + requestInstruments = new WireCaptureContexts.RequestContext.MetricInstruments(meter); blockingInstruments = new WireCaptureContexts.BlockingContext.MetricInstruments(meter); waitingForResponseInstruments = new WireCaptureContexts.WaitingForResponseContext.MetricInstruments(meter); responseInstruments = new WireCaptureContexts.ResponseContext.MetricInstruments(meter); } + + @Override + public IWireCaptureContexts.ICapturingConnectionContext createConnectionContext(String channelKey, String nodeId) { + return new WireCaptureContexts.ConnectionContext(this, channelKey, nodeId); + } } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java index 3b631846c..a873b77ff 100644 --- 
a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -1,33 +1,115 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; +import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; import lombok.Getter; +import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.IWithStartTimeAndAttributes; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; -public class WireCaptureContexts { - private WireCaptureContexts() {} +public class WireCaptureContexts extends IWireCaptureContexts { + public static final String COUNT_UNITS = "count"; + public static final String BYTES_UNIT = "bytes"; - public static abstract class HttpMessageContext extends + + public static class ConnectionContext + extends org.opensearch.migrations.trafficcapture.tracing.ConnectionContext + implements IWireCaptureContexts.ICapturingConnectionContext + { + public ConnectionContext(IRootWireLoggingContext rootInstrumentationScope, String connectionId, String nodeId) { + super(rootInstrumentationScope, connectionId, nodeId); + } + + @Override + public IRootWireLoggingContext getRootInstrumentationScope() { + return (IRootWireLoggingContext) super.getRootInstrumentationScope(); + } + + public static class MetricInstruments + extends org.opensearch.migrations.trafficcapture.tracing.ConnectionContext.MetricInstruments { + public final LongCounter unregisteredCounter; + public final LongCounter removedCounter; + + public MetricInstruments(Meter meter) { + super(meter); + unregisteredCounter = meter + .counterBuilder(MetricNames.UNREGISTERED).setUnit(COUNT_UNITS).build(); + removedCounter = meter + .counterBuilder(MetricNames.REMOVED).setUnit(COUNT_UNITS).build(); + } + } + + @Override + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().getConnectionInstruments(); + } + + @Override + public IWireCaptureContexts.IHttpMessageContext createInitialRequestContext() { + return new RequestContext((RootWireLoggingContext)getRootInstrumentationScope(), + this, 0); + } + + @Override + public void onUnregistered() { + meterIncrementEvent(getMetrics().unregisteredCounter); + } + + @Override + public void onRemoved() { + meterIncrementEvent(getMetrics().removedCounter); + } + } + + @Getter + public abstract static class HttpMessageContext extends BaseNestedSpanContext - implements IHttpTransactionContext, IWithStartTimeAndAttributes { + implements IWireCaptureContexts.IHttpMessageContext { - @Getter final long sourceRequestIndex; - public HttpMessageContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, - long sourceRequestIndex) { + protected HttpMessageContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, + long sourceRequestIndex) { super(rootWireLoggingContext, enclosingScope); this.sourceRequestIndex = sourceRequestIndex; } - } - public static class RequestContext extends HttpMessageContext { - public static final String ACTIVITY_NAME = "gatheringRequest"; + @Override + public IWireCaptureContexts.ICapturingConnectionContext 
getLogicalEnclosingScope() { + return (IWireCaptureContexts.ICapturingConnectionContext) getEnclosingScope(); + } + + @Override + public IWireCaptureContexts.IBlockingContext createBlockingContext() { + close(); + return new BlockingContext(getRootInstrumentationScope(), getImmediateEnclosingScope(), sourceRequestIndex); + } + + @Override + public IWireCaptureContexts.IWaitingForResponseContext createWaitingForResponseContext() { + close(); + return new WaitingForResponseContext(getRootInstrumentationScope(), getImmediateEnclosingScope(), + sourceRequestIndex); + } + + @Override + public IWireCaptureContexts.IResponseContext createResponseContext() { + close(); + return new ResponseContext(getRootInstrumentationScope(), getImmediateEnclosingScope(), sourceRequestIndex); + } + @Override + public IWireCaptureContexts.IRequestContext createNextRequestContext() { + close(); + return new RequestContext(getRootInstrumentationScope(), getImmediateEnclosingScope(), + sourceRequestIndex+1); + } + } + + public static class RequestContext + extends HttpMessageContext + implements IWireCaptureContexts.IRequestContext { public RequestContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, long sourceRequestIndex) { @@ -35,22 +117,60 @@ public RequestContext(RootWireLoggingContext rootWireLoggingContext, } @Override - public String getActivityName() { - return ACTIVITY_NAME; + public IWireCaptureContexts.IWaitingForResponseContext createWaitingForResponseContext() { + return new WaitingForResponseContext(getRootInstrumentationScope(), getImmediateEnclosingScope(), + sourceRequestIndex); } - public static class MetricInstruments extends CommonScopedMetricInstruments { + + public static class MetricInstruments + extends org.opensearch.migrations.trafficcapture.tracing.ConnectionContext.MetricInstruments { + public final LongCounter blockingRequestCounter; + public final LongCounter requestsNotOffloadedCounter; + public final LongCounter fullyParsedRequestCounter; + public final LongCounter bytesReadCounter; + public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + super(meter); + blockingRequestCounter = meter + .counterBuilder(MetricNames.BLOCKING_REQUEST).setUnit(COUNT_UNITS).build(); + requestsNotOffloadedCounter = meter + .counterBuilder(MetricNames.CAPTURE_SUPPRESSED).setUnit(COUNT_UNITS).build(); + fullyParsedRequestCounter = meter + .counterBuilder(MetricNames.FULL_REQUEST).setUnit(COUNT_UNITS).build(); + bytesReadCounter = meter + .counterBuilder(MetricNames.BYTES_READ).setUnit(BYTES_UNIT).build(); } } @Override - public MetricInstruments getMetrics() { - return getRootInstrumentationScope().httpRequestInstruments; + public @NonNull MetricInstruments getMetrics() { + return getRootInstrumentationScope().getRequestInstruments(); + } + + @Override + public void onBlockingRequest() { + meterIncrementEvent(getMetrics().blockingRequestCounter); + } + + @Override + public void onCaptureSuppressed() { + meterIncrementEvent(getMetrics().requestsNotOffloadedCounter); + } + + @Override + public void onFullyParsedRequest() { + meterIncrementEvent(getMetrics().fullyParsedRequestCounter); + } + + @Override + public void onBytesRead(int size) { + meterIncrementEvent(getMetrics().bytesReadCounter, size); } } - public static class BlockingContext extends HttpMessageContext { + public static class BlockingContext + extends HttpMessageContext + implements IWireCaptureContexts.IBlockingContext { public static final String ACTIVITY_NAME = "blocked"; public 
BlockingContext(RootWireLoggingContext rootWireLoggingContext, @@ -71,11 +191,13 @@ public MetricInstruments(Meter meter) { @Override public RequestContext.MetricInstruments getMetrics() { - return getRootInstrumentationScope().httpRequestInstruments; + return getRootInstrumentationScope().requestInstruments; } } - public static class WaitingForResponseContext extends HttpMessageContext { + public static class WaitingForResponseContext + extends HttpMessageContext + implements IWireCaptureContexts.IWaitingForResponseContext { public static final String ACTIVITY_NAME = "waitingForResponse"; public WaitingForResponseContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, @@ -95,11 +217,13 @@ public MetricInstruments(Meter meter) { @Override public RequestContext.MetricInstruments getMetrics() { - return getRootInstrumentationScope().httpRequestInstruments; + return getRootInstrumentationScope().requestInstruments; } } - public static class ResponseContext extends HttpMessageContext { + public static class ResponseContext + extends HttpMessageContext + implements IWireCaptureContexts.IResponseContext { public static final String ACTIVITY_NAME = "gatheringResponse"; public ResponseContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, @@ -111,15 +235,25 @@ public ResponseContext(RootWireLoggingContext rootWireLoggingContext, public String getActivityName() { return ACTIVITY_NAME; } + public static class MetricInstruments extends CommonScopedMetricInstruments { + + private final LongCounter bytesWritten; public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); + bytesWritten = meter + .counterBuilder(MetricNames.BYTES_WRITTEN).setUnit(BYTES_UNIT).build(); } } @Override - public RequestContext.MetricInstruments getMetrics() { - return getRootInstrumentationScope().httpRequestInstruments; + public MetricInstruments getMetrics() { + return getRootInstrumentationScope().getResponseInstruments(); + } + + @Override + public void onBytesWritten(int size) { + meterIncrementEvent(getMetrics().bytesWritten, size); } } } diff --git a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java index 9a829c8b7..f4fe7b893 100644 --- a/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java +++ b/TrafficCapture/nettyWireLogging/src/test/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandlerTest.java @@ -90,7 +90,7 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumer offloader, + "n", "c", ctx -> offloader, new RequestCapturePredicate(), x->true)); // true: block every request channelWriter.accept(channel); @@ -109,7 +109,7 @@ private static void writeMessageAndVerify(byte[] fullTrafficBytes, Consumerto.hasRead()) + .filter(TrafficObservation::hasRead) .map(to->new ByteArrayInputStream(to.getRead().getData().toByteArray())) .collect(Collectors.toList()))); Assertions.assertArrayEquals(fullTrafficBytes, combinedTrafficPacketsSteam.readAllBytes()); @@ -165,7 +165,7 @@ public void testThatSuppressedCaptureWorks() throws Exception { var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", "uploader")); EmbeddedChannel channel = new 
EmbeddedChannel( new ConditionallyReliableLoggingHttpHandler(rootInstrumenter,"n", "c", - (ctx, connectionId) -> offloader, headerCapturePredicate, x->true)); + ctx -> offloader, headerCapturePredicate, x->true)); getWriter(false, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); channel.close(); var requestBytes = SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8); @@ -190,7 +190,7 @@ public void testThatHealthCheckCaptureCanBeSuppressed(boolean singleBytes) throw var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(Map.of("user-Agent", ".*uploader.*")); EmbeddedChannel channel = new EmbeddedChannel( new ConditionallyReliableLoggingHttpHandler(rootInstrumenter,"n", "c", - (ctx, connectionId) -> offloader, headerCapturePredicate, x->false)); + ctx -> offloader, headerCapturePredicate, x->false)); getWriter(singleBytes, true, SimpleRequests.HEALTH_CHECK.getBytes(StandardCharsets.UTF_8)).accept(channel); channel.writeOutbound(Unpooled.wrappedBuffer("response1".getBytes(StandardCharsets.UTF_8))); getWriter(singleBytes, true, SimpleRequests.SMALL_POST.getBytes(StandardCharsets.UTF_8)).accept(channel); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java index 09994d650..77ecd33ad 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java @@ -4,7 +4,7 @@ import org.opensearch.migrations.replay.util.TrafficChannelKeyFormatter; @EqualsAndHashCode() -abstract class PojoTrafficStreamKey implements ITrafficStreamKey { +public abstract class PojoTrafficStreamKey implements ITrafficStreamKey { protected final String nodeId; protected final String connectionId; protected final int trafficStreamIndex; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java index ab5de2d81..887ff16eb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java @@ -10,11 +10,10 @@ import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; + @EqualsAndHashCode(callSuper = true) public class PojoTrafficStreamKeyAndContext extends PojoTrafficStreamKey { - @Getter @Setter - @NonNull IReplayContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; public static PojoTrafficStreamKeyAndContext @@ -30,15 +29,20 @@ protected PojoTrafficStreamKeyAndContext(TrafficStream stream) { this(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); } - public static PojoTrafficStreamKeyAndContext build(String nodeId, String connectionId, int index, Function contextSupplier) { + public static PojoTrafficStreamKeyAndContext + build(String nodeId, String connectionId, int index, + Function contextSupplier) { var rval = new PojoTrafficStreamKeyAndContext(nodeId, connectionId, 
index); rval.setTrafficStreamsContext(contextSupplier.apply(rval)); return rval; } - protected PojoTrafficStreamKeyAndContext(String nodeId, String connectionId, int index) { + private PojoTrafficStreamKeyAndContext(String nodeId, String connectionId, int index) { super(nodeId, connectionId, index); } + @NonNull + public IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext() { + return trafficStreamsContext; + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index 01088398e..a74336db6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -216,7 +216,8 @@ public Optional getNextRequiredTouch() { var key = new TrafficStreamKeyWithKafkaRecordId( tsk -> { var channelKeyCtx = channelContextManager.retainOrCreateContext(tsk); - return new ReplayContexts.KafkaRecordContext(channelKeyCtx, kafkaRecord.key(), + return channelContextManager.getGlobalContext() + .createTrafficStreamContextForKafkaSource(channelKeyCtx, kafkaRecord.key(), kafkaRecord.serializedKeySize() + kafkaRecord.serializedValueSize()); }, ts, offsetData); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index 458f52155..78e96db00 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -8,6 +8,7 @@ import java.util.function.Function; public class ChannelContextManager implements Function { + @Getter private final RootReplayerContext globalContext; public ChannelContextManager(RootReplayerContext globalContext) { @@ -15,14 +16,14 @@ public ChannelContextManager(RootReplayerContext globalContext) { } private static class RefCountedContext { - @Getter final ReplayContexts.ChannelKeyContext context; + @Getter final IReplayContexts.IChannelKeyContext context; private int refCount; - private RefCountedContext(ReplayContexts.ChannelKeyContext context) { + private RefCountedContext(IReplayContexts.IChannelKeyContext context) { this.context = context; } - ReplayContexts.ChannelKeyContext retain() { + IReplayContexts.IChannelKeyContext retain() { refCount++; return context; } @@ -41,16 +42,16 @@ boolean release() { HashMap connectionToChannelContextMap = new HashMap<>(); - public ReplayContexts.ChannelKeyContext apply(ITrafficStreamKey tsk) { + public IReplayContexts.IChannelKeyContext apply(ITrafficStreamKey tsk) { return retainOrCreateContext(tsk); } - public ReplayContexts.ChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { + public IReplayContexts.IChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), k-> new RefCountedContext(globalContext.createChannelContext(tsk))).retain(); } - public ReplayContexts.ChannelKeyContext releaseContextFor(ReplayContexts.ChannelKeyContext ctx) { + public IReplayContexts.IChannelKeyContext 
releaseContextFor(IReplayContexts.IChannelKeyContext ctx) { var connId = ctx.getConnectionId(); var refCountedCtx = connectionToChannelContextMap.get(connId); assert ctx == refCountedCtx.context; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index a08c98a1b..0c5010de5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -12,18 +12,7 @@ import java.time.Instant; -public class IReplayContexts { -// -// public static class ScopeNames { -// private ScopeNames() {} -// -// public static final String KAFKA_RECORD_SCOPE = "KafkaRecord"; -// public static final String TRAFFIC_STREAM_LIFETIME_SCOPE = "TrafficStreamLifetime"; -// public static final String ACCUMULATOR_SCOPE = "Accumulator"; -// public static final String HTTP_TRANSFORMER_SCOPE = "HttpTransformer"; -// public static final String REQUEST_SENDER_SCOPE = "RequestSender"; -// public static final String TRAFFIC_REPLAYER_SCOPE = "TrafficReplayer"; -// } +public abstract class IReplayContexts { public static class ActivityNames { private ActivityNames() {} diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 67a799c15..b52992262 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -9,6 +9,7 @@ import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NonNull; +import lombok.Setter; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; @@ -22,9 +23,18 @@ public class KafkaConsumerContexts { private KafkaConsumerContexts() {} - @AllArgsConstructor public static class AsyncListeningContext implements IKafkaConsumerContexts.IAsyncListeningContext { + @Getter + @NonNull + public final RootReplayerContext enclosingScope; + @Getter @Setter + Exception observedExceptionToIncludeInMetrics; + + public AsyncListeningContext(@NonNull RootReplayerContext enclosingScope) { + this.enclosingScope = enclosingScope; + } + public static class MetricInstruments { public final LongCounter kafkaPartitionsRevokedCounter; public final LongCounter kafkaPartitionsAssignedCounter; @@ -39,10 +49,6 @@ public MetricInstruments(Meter meter) { } } - @Getter - @NonNull - public final RootReplayerContext enclosingScope; - private @NonNull MetricInstruments getMetrics() { return enclosingScope.asyncListeningInstruments; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index b3643872a..dd13c833a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -4,7 +4,6 @@ import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; import lombok.NonNull; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; @@ -19,13 +18,11 @@ import java.time.Duration; import java.time.Instant; -public class ReplayContexts { +public abstract class ReplayContexts extends IReplayContexts { public static final String COUNT_UNIT_STR = "count"; public static final String BYTES_UNIT_STR = "bytes"; - private ReplayContexts() {} - public static class ChannelKeyContext extends BaseNestedSpanContext implements IReplayContexts.IChannelKeyContext { @@ -45,7 +42,7 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); activeChannelCounter = meter - .upDownCounterBuilder(IReplayContexts.MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); + .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); } } @@ -69,27 +66,33 @@ public void onConnectionClosed() { } public static class KafkaRecordContext - extends DirectNestedSpanContext + extends BaseNestedSpanContext implements IReplayContexts.IKafkaRecordContext { final String recordId; - public KafkaRecordContext(ChannelKeyContext enclosingScope, String recordId, int recordSize) { - super(enclosingScope); + public KafkaRecordContext(RootReplayerContext rootReplayerContext, + IChannelKeyContext enclosingScope, String recordId, int recordSize) { + super(rootReplayerContext, enclosingScope); this.recordId = recordId; initializeSpan(); meterIncrementEvent(getMetrics().recordCounter); meterIncrementEvent(getMetrics().bytesCounter, recordSize); } + @Override + public IChannelKeyContext getLogicalEnclosingScope() { + return (IChannelKeyContext) getEnclosingScope(); + } + public static class MetricInstruments extends CommonScopedMetricInstruments { final LongCounter recordCounter; final LongCounter bytesCounter; public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); - recordCounter = meter.counterBuilder(IReplayContexts.MetricNames.KAFKA_RECORD_READ) + recordCounter = meter.counterBuilder(MetricNames.KAFKA_RECORD_READ) .setUnit("records").build(); - bytesCounter = meter.counterBuilder(IReplayContexts.MetricNames.KAFKA_BYTES_READ) + bytesCounter = meter.counterBuilder(MetricNames.KAFKA_BYTES_READ) .setUnit(BYTES_UNIT_STR).build(); } } @@ -106,7 +109,8 @@ public String getRecordId() { @Override public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficLifecyleContext(ITrafficStreamKey tsk) { - return new ReplayContexts.TrafficStreamsLifecycleContext(this, tsk); + return new ReplayContexts.TrafficStreamsLifecycleContext(this.getRootInstrumentationScope(), this, tsk + ); } } @@ -115,31 +119,21 @@ public static class TrafficStreamsLifecycleContext implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; - protected TrafficStreamsLifecycleContext(IInstrumentationAttributes enclosingScope, - ITrafficStreamKey trafficStreamKey, - RootReplayerContext rootScope) { + protected TrafficStreamsLifecycleContext(RootReplayerContext rootScope, + IInstrumentationAttributes enclosingScope, + ITrafficStreamKey trafficStreamKey) { super(rootScope, enclosingScope); this.trafficStreamKey = 
trafficStreamKey; initializeSpan(); meterIncrementEvent(getMetrics().streamsRead); } - public TrafficStreamsLifecycleContext(KafkaRecordContext enclosingScope, - ITrafficStreamKey trafficStreamKey) { - this(enclosingScope, trafficStreamKey, enclosingScope.getRootInstrumentationScope()); - } - - protected TrafficStreamsLifecycleContext(ChannelKeyContext enclosingScope, - ITrafficStreamKey trafficStreamKey) { - this(enclosingScope, trafficStreamKey, enclosingScope.getRootInstrumentationScope()); - } - public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter streamsRead; public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); - streamsRead = meter.counterBuilder(IReplayContexts.MetricNames.TRAFFIC_STREAMS_READ) + streamsRead = meter.counterBuilder(MetricNames.TRAFFIC_STREAMS_READ) .setUnit("objects").build(); } } @@ -312,37 +306,37 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); - headerParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_HEADER_PARSE) + headerParses = meter.counterBuilder(MetricNames.TRANSFORM_HEADER_PARSE) .setUnit(COUNT_UNIT_STR).build(); - payloadParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED) + payloadParses = meter.counterBuilder(MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED) .setUnit(COUNT_UNIT_STR).build(); - payloadSuccessParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_PARSE_SUCCESS) + payloadSuccessParses = meter.counterBuilder(MetricNames.TRANSFORM_PAYLOAD_PARSE_SUCCESS) .setUnit(COUNT_UNIT_STR).build(); - jsonPayloadParses = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_JSON_REQUIRED) + jsonPayloadParses = meter.counterBuilder(MetricNames.TRANSFORM_JSON_REQUIRED) .setUnit(COUNT_UNIT_STR).build(); - jsonTransformSuccess = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_JSON_SUCCEEDED) + jsonTransformSuccess = meter.counterBuilder(MetricNames.TRANSFORM_JSON_SUCCEEDED) .setUnit(COUNT_UNIT_STR).build(); - payloadBytesIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_PAYLOAD_BYTES_IN) + payloadBytesIn = meter.counterBuilder(MetricNames.TRANSFORM_PAYLOAD_BYTES_IN) .setUnit(BYTES_UNIT_STR).build(); - uncompressedBytesIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_IN) + uncompressedBytesIn = meter.counterBuilder(MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_IN) .setUnit(BYTES_UNIT_STR).build(); - uncompressedBytesOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_OUT) + uncompressedBytesOut = meter.counterBuilder(MetricNames.TRANSFORM_UNCOMPRESSED_BYTES_OUT) .setUnit(BYTES_UNIT_STR).build(); - finalPayloadBytesOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_FINAL_PAYLOAD_BYTES_OUT) + finalPayloadBytesOut = meter.counterBuilder(MetricNames.TRANSFORM_FINAL_PAYLOAD_BYTES_OUT) .setUnit(BYTES_UNIT_STR).build(); - transformSuccess = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_SUCCESS) + transformSuccess = meter.counterBuilder(MetricNames.TRANSFORM_SUCCESS) .setUnit(COUNT_UNIT_STR).build(); - transformSkipped = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_SKIPPED) + transformSkipped = meter.counterBuilder(MetricNames.TRANSFORM_SKIPPED) .setUnit(COUNT_UNIT_STR).build(); - transformError = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_ERROR) + transformError = 
meter.counterBuilder(MetricNames.TRANSFORM_ERROR) .setUnit(COUNT_UNIT_STR).build(); - transformBytesIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_BYTES_IN) + transformBytesIn = meter.counterBuilder(MetricNames.TRANSFORM_BYTES_IN) .setUnit(BYTES_UNIT_STR).build(); - transformChunksIn = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_IN) + transformChunksIn = meter.counterBuilder(MetricNames.TRANSFORM_CHUNKS_IN) .setUnit(COUNT_UNIT_STR).build(); - transformBytesOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_BYTES_OUT) + transformBytesOut = meter.counterBuilder(MetricNames.TRANSFORM_BYTES_OUT) .setUnit(BYTES_UNIT_STR).build(); - transformChunksOut = meter.counterBuilder(IReplayContexts.MetricNames.TRANSFORM_CHUNKS_OUT) + transformChunksOut = meter.counterBuilder(MetricNames.TRANSFORM_CHUNKS_OUT) .setUnit(COUNT_UNIT_STR).build(); } @@ -413,7 +407,7 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { DoubleHistogram lag; public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); - lag = meter.histogramBuilder(IReplayContexts.MetricNames.NETTY_SCHEDULE_LAG).setUnit("ms").build(); + lag = meter.histogramBuilder(MetricNames.NETTY_SCHEDULE_LAG).setUnit("ms").build(); } } @@ -446,11 +440,11 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { public MetricInstruments(Meter meter) { super(meter, ACTIVITY_NAME); - sourceTargetGap = meter.histogramBuilder(IReplayContexts.MetricNames.SOURCE_TO_TARGET_REQUEST_LAG) + sourceTargetGap = meter.histogramBuilder(MetricNames.SOURCE_TO_TARGET_REQUEST_LAG) .setUnit("ms").build(); - bytesWritten = meter.counterBuilder(IReplayContexts.MetricNames.BYTES_WRITTEN_TO_TARGET) + bytesWritten = meter.counterBuilder(MetricNames.BYTES_WRITTEN_TO_TARGET) .setUnit(BYTES_UNIT_STR).build(); - bytesRead = meter.counterBuilder(IReplayContexts.MetricNames.BYTES_READ_FROM_TARGET) + bytesRead = meter.counterBuilder(MetricNames.BYTES_READ_FROM_TARGET) .setUnit(BYTES_UNIT_STR).build(); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 30cc6dcf9..db1eae243 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.OpenTelemetry; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.tracing.RootOtelContext; import lombok.Getter; @@ -68,11 +69,24 @@ public TrafficSourceContexts.ReadChunkContext createReadChunkContext() { return new TrafficSourceContexts.ReadChunkContext(this, this); } - public ReplayContexts.ChannelKeyContext createChannelContext(ITrafficStreamKey tsk) { + public IReplayContexts.IChannelKeyContext createChannelContext(ITrafficStreamKey tsk) { return new ReplayContexts.ChannelKeyContext(this, this, tsk); } public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { return new KafkaConsumerContexts.CommitScopeContext(this, this); } + + public IReplayContexts.ITrafficStreamsLifecycleContext + createTrafficStreamContextForStreamSource(IReplayContexts.IChannelKeyContext channelCtx, + ITrafficStreamKey 
tsk) { + return new InputStreamOfTraffic.IOSTrafficStreamContext(this, channelCtx, tsk); + } + + public IReplayContexts.IKafkaRecordContext + createTrafficStreamContextForKafkaSource(IReplayContexts.IChannelKeyContext channelCtx, + String recordId, + int kafkaRecordSize) { + return new ReplayContexts.KafkaRecordContext(this, channelCtx, recordId, kafkaRecordSize); + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 6e0e4f919..675a3adfc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -89,8 +89,8 @@ public void stopReadsPast(Instant pointInTime) { * that barrier isn't meant to be a tight barrier with immediate effect. */ public CompletableFuture> - readNextTrafficStreamChunk(Supplier commitContextSupplier) { - var readContext = commitContextSupplier.get(); + readNextTrafficStreamChunk(Supplier readChunkContextSupplier) { + var readContext = readChunkContextSupplier.get(); log.info("BlockingTrafficSource::readNext"); var trafficStreamListFuture = CompletableFuture .supplyAsync(() -> blockIfNeeded(readContext), task -> new Thread(task).start()) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java index cd9d6426f..89353ebe0 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ITrafficCaptureSource.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.replay.traffic.source; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import java.io.Closeable; @@ -10,7 +9,6 @@ import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; -import java.util.function.Function; import java.util.function.Supplier; public interface ITrafficCaptureSource extends Closeable { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 072b5b263..d9e1baa02 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -6,7 +6,7 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; -import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import 
org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; @@ -18,7 +18,6 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Function; import java.util.function.Supplier; @Slf4j @@ -33,8 +32,10 @@ public InputStreamOfTraffic(RootReplayerContext context, InputStream inputStream } public static final class IOSTrafficStreamContext extends ReplayContexts.TrafficStreamsLifecycleContext { - private IOSTrafficStreamContext(ReplayContexts.ChannelKeyContext enclosingScope, ITrafficStreamKey trafficStreamKey) { - super(enclosingScope, trafficStreamKey); + public IOSTrafficStreamContext(RootReplayerContext rootReplayerContext, + IReplayContexts.IChannelKeyContext enclosingScope, + ITrafficStreamKey trafficStreamKey) { + super(rootReplayerContext, enclosingScope, trafficStreamKey); } } @@ -60,7 +61,8 @@ private IOSTrafficStreamContext(ReplayContexts.ChannelKeyContext enclosingScope, return List.of(new PojoTrafficStreamAndKey(ts, PojoTrafficStreamKeyAndContext.build(ts, tsk-> { var channelCtx = channelContextManager.retainOrCreateContext(tsk); - return new IOSTrafficStreamContext(channelCtx, tsk); + return channelContextManager.getGlobalContext() + .createTrafficStreamContextForStreamSource(channelCtx, tsk); }))); }).exceptionally(e->{ var ecf = new CompletableFuture>(); @@ -72,6 +74,7 @@ private IOSTrafficStreamContext(ReplayContexts.ChannelKeyContext enclosingScope, @Override public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { // do nothing - this datasource isn't transactional + channelContextManager.releaseContextFor(trafficStreamKey.getTrafficStreamsContext().getLogicalEnclosingScope()); return CommitResult.Immediate; } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index c7680e3dc..57ac36964 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -7,6 +7,7 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; @@ -29,6 +30,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; @Slf4j @WrapWithNettyLeakDetection(disableLeakChecks = true) @@ -38,6 +40,7 @@ class BlockingTrafficSourceTest { @Test void readNextChunkTest() throws Exception { + final var rootContext = TestContext.noTracking(); var nStreamsToCreate = 210; var BUFFER_MILLIS = 10; var testSource = new TestTrafficCaptureSource(nStreamsToCreate); @@ -46,7 +49,7 @@ void readNextChunkTest() throws Exception { 
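The hunks in this area move readNextTrafficStreamChunk from taking a ready-made context to taking a supplier of read-chunk contexts, so each poll opens its own context under the root scope. A minimal caller sketch, assuming only the InputStreamOfTraffic constructor and the createReadChunkContext() factory that appear in these hunks; the timeout value and the EOF handling are illustrative, not code from this repository:

    import java.io.FileInputStream;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.TimeUnit;
    import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic;
    import org.opensearch.migrations.tracing.TestContext;

    class ReadChunkLoopSketch {
        public static void main(String[] args) throws Exception {
            var rootContext = TestContext.noTracking();
            try (var source = new InputStreamOfTraffic(rootContext, new FileInputStream(args[0]))) {
                while (true) {
                    // A fresh read-chunk context is created for each poll by the supplier.
                    var chunk = source.readNextTrafficStreamChunk(rootContext::createReadChunkContext)
                            .get(5, TimeUnit.SECONDS);
                    chunk.forEach(tsWithKey -> System.out.println("read " + tsWithKey.getKey()));
                }
            } catch (ExecutionException e) {
                // The stream sources complete the future exceptionally (e.g. with an EOFException)
                // once the input is exhausted, so this is the expected way out of the loop here.
            }
        }
    }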
blockingSource.stopReadsPast(sourceStartTime.plus(Duration.ofMillis(0))); var firstChunk = new ArrayList(); for (int i = 0; i<=BUFFER_MILLIS+SHIFT; ++i) { - var nextPieceFuture = blockingSource.readNextTrafficStreamChunk(TestContext.noTracking()); + var nextPieceFuture = blockingSource.readNextTrafficStreamChunk(rootContext::createReadChunkContext); nextPieceFuture.get(500000, TimeUnit.MILLISECONDS) .forEach(ts->firstChunk.add(ts)); } @@ -54,9 +57,10 @@ void readNextChunkTest() throws Exception { Assertions.assertTrue(BUFFER_MILLIS+SHIFT <= firstChunk.size()); Instant lastTime = null; for (int i =SHIFT; i pQueue = new PriorityQueue<>(); Integer cursorHighWatermark; ArrayCursorTrafficSourceFactory arrayCursorTrafficSourceFactory; + TestContext rootContext; public ArrayCursorTrafficCaptureSource(ArrayCursorTrafficSourceFactory arrayCursorTrafficSourceFactory) { var startingCursor = arrayCursorTrafficSourceFactory.nextReadCursor.get(); @@ -277,17 +281,19 @@ public ArrayCursorTrafficCaptureSource(ArrayCursorTrafficSourceFactory arrayCurs this.readCursor = new AtomicInteger(startingCursor); this.arrayCursorTrafficSourceFactory = arrayCursorTrafficSourceFactory; cursorHighWatermark = startingCursor; + rootContext = TestContext.noTracking(); } @Override - public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { + public CompletableFuture> + readNextTrafficStreamChunk(Supplier contextSupplier) { var idx = readCursor.getAndIncrement(); log.info("reading chunk from index="+idx); if (arrayCursorTrafficSourceFactory.trafficStreamsList.size() <= idx) { return CompletableFuture.failedFuture(new EOFException()); } var stream = arrayCursorTrafficSourceFactory.trafficStreamsList.get(idx); - var key = new TrafficStreamCursorKey(context, stream, idx); + var key = new TrafficStreamCursorKey(rootContext, stream, idx); synchronized (pQueue) { pQueue.add(key); cursorHighWatermark = idx; @@ -296,7 +302,7 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public CommitResult commitTrafficStream(IInstrumentationAttributes ctx, ITrafficStreamKey trafficStreamKey) { + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { synchronized (pQueue) { // figure out if I need to do something more efficient later log.info("Commit called for "+trafficStreamKey+" with pQueue.size="+pQueue.size()); var incomingCursor = ((TrafficStreamCursorKey)trafficStreamKey).arrayIndex; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index 2a8e1d18a..521a58bd0 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -159,17 +159,18 @@ Producer buildKafkaProducer() { } private Supplier - loadStreamsToKafkaFromCompressedFile(IInstrumentationAttributes context, + loadStreamsToKafkaFromCompressedFile(TestContext rootCtx, KafkaConsumer kafkaConsumer, String filename, int recordCount) throws Exception { var kafkaProducer = buildKafkaProducer(); loadStreamsAsynchronouslyWithCloseableResource(kafkaConsumer, - new V0_1TrafficCaptureSource(context, filename), + new V0_1TrafficCaptureSource(rootCtx, filename), originalTrafficSource -> { try { for (int i = 0; i < recordCount; ++i) { 
List chunks = null; - chunks = originalTrafficSource.readNextTrafficStreamChunk(TestContext.noTracking()).get(); + chunks = originalTrafficSource.readNextTrafficStreamChunk(rootCtx::createReadChunkContext) + .get(); for (int j = 0; j < chunks.size(); ++j) { KafkaTestUtils.writeTrafficStreamRecord(kafkaProducer, chunks.get(j).getStream(), TEST_TOPIC_NAME, "KEY_" + i + "_" + j); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java index 703d3f196..7e91b29ac 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java @@ -2,6 +2,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.IInstrumentationAttributes; @@ -12,6 +13,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Supplier; import java.util.stream.Collectors; @Slf4j @@ -26,11 +28,12 @@ public SentinelSensingTrafficSource(ISimpleTrafficCaptureSource underlyingSource } @Override - public CompletableFuture> readNextTrafficStreamChunk(IInstrumentationAttributes context) { + public CompletableFuture> + readNextTrafficStreamChunk(Supplier contextSupplier) { if (stopReadingRef.get()) { return CompletableFuture.failedFuture(new EOFException()); } - return underlyingSource.readNextTrafficStreamChunk(context).thenApply(v->{ + return underlyingSource.readNextTrafficStreamChunk(contextSupplier).thenApply(v->{ if (v != null) { return v.stream().takeWhile(ts->{ var isSentinel = ts.getStream().getConnectionId().equals(SENTINEL_CONNECTION_ID); @@ -46,9 +49,8 @@ public CompletableFuture> readNextTrafficStreamChunk } @Override - public CommitResult commitTrafficStream(IInstrumentationAttributes context, - ITrafficStreamKey trafficStreamKey) throws IOException { - return underlyingSource.commitTrafficStream(context, trafficStreamKey); + public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) throws IOException { + return underlyingSource.commitTrafficStream(trafficStreamKey); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index c653f775c..4df76b056 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -13,10 +13,12 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import 
org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.TestContext; @@ -24,7 +26,6 @@ import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; -import org.opensearch.migrations.trafficcapture.tracing.RootOffloaderContext; import java.io.IOException; import java.time.Duration; @@ -94,7 +95,7 @@ public String toString() { } } - static class TestRootContext extends RootOffloaderContext { + static class TestRootContext extends RootOtelContext { public TestRootContext() { super(null); } @@ -122,11 +123,11 @@ static ByteBuf makeSequentialByteBuf(int offset, int size) { } static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, AtomicInteger uniqueIdCounter, - List directives) throws Exception { + List directives, TestContext rootContext) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); - var offloader = connectionFactory.createOffloader(new ConnectionContext(new TestRootContext(), - "n", "test"), - "TEST_"+uniqueIdCounter.incrementAndGet()); + var tsk = PojoTrafficStreamKeyAndContext.build("n", "test", uniqueIdCounter.incrementAndGet(), + k->rootContext.createChannelContext(k).getChannelKey().getTrafficStreamsContext()); + var offloader = connectionFactory.createOffloader(TestContext.noTracking().createChannelContext(tsk)); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); } @@ -200,7 +201,7 @@ void generateAndTest(String testName, int bufferSize, int skipCount, List directives, List expectedSizes) throws Exception { var context = TestContext.noTracking(); var trafficStreams = Arrays.stream(makeTrafficStreams(bufferSize, 0, new AtomicInteger(), - directives)).skip(skipCount); + directives, TestContext.noTracking())).skip(skipCount); List reconstructedTransactions = new ArrayList<>(); AtomicInteger requestsReceived = new AtomicInteger(0); accumulateTrafficStreamsWithNewAccumulator(context, trafficStreams, reconstructedTransactions, requestsReceived); @@ -217,7 +218,7 @@ void generateAndTest(String testName, int bufferSize, int skipCount, * @return */ static SortedSet - accumulateTrafficStreamsWithNewAccumulator(IInstrumentationAttributes context, + accumulateTrafficStreamsWithNewAccumulator(RootReplayerContext context, Stream trafficStreams, List aggregations, AtomicInteger requestsReceived) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index a338cf808..5d144e0f4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -112,12 +112,13 @@ private static Timestamp 
getProtobufTimestamp(Instant t) { public void testDelimitedDeserializer() throws Exception { final Instant timestamp = Instant.now(); byte[] serializedChunks = synthesizeTrafficStreamsIntoByteArray(timestamp, 3); + var rootContext = TestContext.noTracking(); try (var bais = new ByteArrayInputStream(serializedChunks)) { AtomicInteger counter = new AtomicInteger(0); var allMatch = new AtomicBoolean(true); - try (var trafficProducer = new InputStreamOfTraffic(TestContext.noTracking(), bais)) { + try (var trafficProducer = new InputStreamOfTraffic(rootContext, bais)) { while (true) { - trafficProducer.readNextTrafficStreamChunk(TestContext.noTracking()).get().stream() + trafficProducer.readNextTrafficStreamChunk(rootContext::createReadChunkContext).get().stream() .forEach(ts->{ var i = counter.incrementAndGet(); var expectedStream = makeTrafficStream(timestamp.plus(i - 1, ChronoUnit.SECONDS), i); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java index 62fc5bb3e..bb413221d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java @@ -4,6 +4,7 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.testutils.StreamInterleaver; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -223,7 +224,7 @@ private static void fillCommandsAndSizes(Random r, double cancelRequestLikelihoo var flushLikelihood = Math.pow(r2.nextDouble(),2.0); fillCommandsAndSizes(r2, flushLikelihood/4, flushLikelihood, bufferBound, commands, sizes); return SimpleCapturedTrafficToHttpTransactionAccumulatorTest.makeTrafficStreams(bufferSize, (int) rSeed, - uniqueIdCounter, commands); + uniqueIdCounter, commands, TestContext.noTracking()); } /** diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java index 587bc0ec2..9cf64bac5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -13,7 +14,7 @@ public class V0_1TrafficCaptureSource extends CompressedFileTrafficCaptureSource protected final HashMap connectionProgressMap; - public V0_1TrafficCaptureSource(IInstrumentationAttributes context, String filename) throws IOException { + public V0_1TrafficCaptureSource(RootReplayerContext context, String filename) throws IOException { super(context, filename); 
connectionProgressMap = new HashMap<>(); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java index d9c36bc9a..f3d593184 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java @@ -1,13 +1,5 @@ package org.opensearch.migrations.replay.kafka; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; -import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; -import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import lombok.Lombok; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -56,6 +48,7 @@ private KafkaConsumer buildKafkaConsumer() { @Test @Tag("longTest") public void testThatCommitsAndReadsKeepWorking() throws Exception { + final var rootContext = TestContext.noTracking(); var kafkaSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS/3)); var blockingSource = new BlockingTrafficSource(kafkaSource, Duration.ofMinutes(5)); @@ -76,7 +69,7 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { var ts = chunks.get(0); Thread.sleep(DEFAULT_POLL_INTERVAL_MS*2); log.info("committing "+ts.getKey()); - blockingSource.commitTrafficStream(TestContext.noTracking(), ts.getKey()); + blockingSource.commitTrafficStream(ts.getKey()); blockingSource.stopReadsPast(getTimeAtPoint(i)); } } catch (Exception e) { @@ -86,7 +79,7 @@ public void testThatCommitsAndReadsKeepWorking() throws Exception { for (int i=0; i(); log.atInfo().setMessage(()->"re-establish... 3 ..."+renderNextCommitsAsString()).log(); readNextNStreams(trafficSource, keysReceived, 0, 1); - trafficSource.commitTrafficStream(TestContext.noTracking(), keysReceivedUntilDrop1.get(1)); + trafficSource.commitTrafficStream(keysReceivedUntilDrop1.get(1)); log.atInfo().setMessage(()->"re-establish... 
4 ..."+renderNextCommitsAsString()).log(); readNextNStreams(trafficSource, keysReceived, 1, 1); log.atInfo().setMessage(()->"5 ..."+renderNextCommitsAsString()).log(); @@ -158,8 +158,9 @@ private String renderNextCommitsAsString() { private static void readNextNStreams(BlockingTrafficSource kafkaSource, List keysReceived, int from, int count) { Assertions.assertEquals(from, keysReceived.size()); + final var rootContext = TestContext.noTracking(); for (int i=0; i{ var tsk = ts.getKey(); log.atInfo().setMessage(()->"checking for "+tsk).log(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index f0022d1d4..a283b5e79 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -36,6 +36,7 @@ public class KafkaTrafficCaptureSourceLongTermTest { @Tag("longTest") public void testTrafficCaptureSource() throws Exception { String testTopicName = "TEST_TOPIC"; + final var rootContext = TestContext.noTracking(); var kafkaConsumerProps = KafkaTrafficCaptureSource.buildKafkaProperties(embeddedKafkaBroker.getBootstrapServers(), TEST_GROUP_CONSUMER_ID, false, null); @@ -60,7 +61,7 @@ public void testTrafficCaptureSource() throws Exception { for (int i=0; i { - var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(TestContext.noTracking()) + var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(rootContext::createReadChunkContext) .get(1, TimeUnit.SECONDS); if (rogueChunk.isEmpty()) { // TimeoutExceptions cannot be thrown by the supplier of the CompletableFuture today, BUT we diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index dfe645ebe..3d934d613 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -43,20 +43,22 @@ class KafkaTrafficCaptureSourceTest { @Test public void testRecordToString() { + final var rootContext = TestContext.noTracking(); var ts = TrafficStream.newBuilder() .setConnectionId("c") .setNodeId("n") .setNumber(7) .build(); var tsk = new TrafficStreamKeyWithKafkaRecordId( - k -> new ReplayContexts.KafkaRecordContext( - new ChannelContextManager(TestContext.noTracking()).retainOrCreateContext(k), "", 1), + k -> new ReplayContexts.KafkaRecordContext(rootContext, + new ChannelContextManager(rootContext).retainOrCreateContext(k), "", 1), ts, 1, 2, 123); Assertions.assertEquals("n.c.7|partition=2|offset=123", tsk.toString()); } @Test public void testSupplyTrafficFromSource() { + final var rootContext = TestContext.noTracking(); int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.noTracking(), @@ -79,7 +81,7 @@ public void testSupplyTrafficFromSource() { var tsCount = new AtomicInteger(); 
Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { - protobufConsumer.readNextTrafficStreamChunk(TestContext.noTracking()).get().stream() + protobufConsumer.readNextTrafficStreamChunk(rootContext::createReadChunkContext).get().stream() .forEach(streamWithKey -> { tsCount.incrementAndGet(); log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); @@ -127,11 +129,11 @@ public void testSupplyTrafficWithUnformattedMessages() { // This assertion will fail the test case if not completed within its duration, as would be the case if there // were missing traffic streams. Its task currently is limited to the numTrafficStreams where it will stop the stream - + final var rootContext = TestContext.noTracking(); var tsCount = new AtomicInteger(); Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { - protobufConsumer.readNextTrafficStreamChunk(TestContext.noTracking()).get().stream() + protobufConsumer.readNextTrafficStreamChunk(rootContext::createReadChunkContext).get().stream() .forEach(streamWithKey->{ tsCount.incrementAndGet(); log.trace("Stream has substream count: " + streamWithKey.getStream().getSubStreamCount()); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java index b13c17d85..ac6e0531f 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java @@ -1,21 +1,14 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; - -import java.time.Instant; class TestTrafficStreamsLifecycleContext extends ReplayContexts.TrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; public TestTrafficStreamsLifecycleContext(RootReplayerContext rootContext, ITrafficStreamKey tsk) { - super(new ReplayContexts.ChannelKeyContext(rootContext, rootContext, tsk), tsk, rootContext); + super(rootContext, new ReplayContexts.ChannelKeyContext(rootContext, rootContext, tsk), tsk); this.trafficStreamKey = tsk; initializeSpan(); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index 43555e813..c1984ecf1 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -6,7 +6,7 @@ public class TestContext extends RootReplayerContext { - private final 
InMemoryInstrumentationBundle inMemoryInstrumentationBundle; + public final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; public static TestContext withTracking() { return new TestContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), From 5f6bb3faaadfa48f0436398deedbfb0a86cef44e Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 12 Jan 2024 23:17:12 -0500 Subject: [PATCH 58/94] Fix the last of the compilation errors though tests are failing still. Signed-off-by: Greg Schohn --- .../proxyserver/CaptureProxy.java | 12 +++++++----- .../proxyserver/RootCaptureContext.java | 18 ++++++++++++++++++ 2 files changed, 25 insertions(+), 5 deletions(-) create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 9cf36c843..edd172c9a 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -187,7 +187,7 @@ private static Settings getSettings(@NonNull String configFile) { private static IConnectionCaptureFactory getNullConnectionCaptureFactory() { System.err.println("No trace log directory specified. Logging to /dev/null"); - return (ctx,connectionId) -> new StreamChannelConnectionCaptureSerializer<>(null, connectionId, + return ctx -> new StreamChannelConnectionCaptureSerializer<>(null, ctx.getConnectionId(), new StreamLifecycleManager<>() { @Override public void close() {} @@ -238,13 +238,15 @@ static Properties buildKafkaProperties(Parameters params) throws IOException { return kafkaProps; } - private static IConnectionCaptureFactory getConnectionCaptureFactory(Parameters params) throws IOException { + private static IConnectionCaptureFactory + getConnectionCaptureFactory(Parameters params, RootCaptureContext rootContext) throws IOException { var nodeId = getNodeId(); // Resist the urge for now though until it comes in as a request/need. 
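The no-capture branch above shows the factory shape this patch settles on: the offloader is created from the connection context alone, and the connection id is read off the context rather than passed separately. A hedged sketch of another factory written against that shape; the logging body, the nodeId value, and the streamManager variable are placeholders rather than code from this repository, and the type parameter on the factory is assumed:

    // Sketch only: assumes IConnectionCaptureFactory remains usable as a functional interface
    // whose single method takes the connection context, as getNullConnectionCaptureFactory's
    // lambda above does.
    IConnectionCaptureFactory<Void> loggingFactory = ctx -> {
        log.atInfo().setMessage(() -> "capture starting for connection " + ctx.getConnectionId()).log();
        return new StreamChannelConnectionCaptureSerializer<>(nodeId, ctx.getConnectionId(), streamManager);
    };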
if (params.traceDirectory != null) { return new FileConnectionCaptureFactory(nodeId, params.traceDirectory, params.maximumTrafficStreamSize); } else if (params.kafkaConnection != null) { - return new KafkaCaptureFactory(nodeId, new KafkaProducer<>(buildKafkaProperties(params)), params.maximumTrafficStreamSize); + return new KafkaCaptureFactory(rootContext, + nodeId, new KafkaProducer<>(buildKafkaProperties(params)), params.maximumTrafficStreamSize); } else if (params.noCapture) { return getNullConnectionCaptureFactory(); } else { @@ -304,7 +306,7 @@ public static void main(String[] args) throws InterruptedException, IOException var params = parseArgs(args); var backsideUri = convertStringToUri(params.backsideUriString); - var rootContext = new RootWireLoggingContext( + var rootContext = new RootCaptureContext( RootOtelContext.initializeOpenTelemetry(params.otelCollectorEndpoint, "capture")); var sksOp = Optional.ofNullable(params.sslConfigFilePath) @@ -329,7 +331,7 @@ public static void main(String[] args) throws InterruptedException, IOException var headerCapturePredicate = new HeaderValueFilteringCapturePredicate(convertPairListToMap(params.suppressCaptureHeaderPairs)); proxy.start(rootContext, backsideConnectionPool, params.numThreads, sslEngineSupplier, - getConnectionCaptureFactory(params), headerCapturePredicate); + getConnectionCaptureFactory(params, rootContext), headerCapturePredicate); } catch (Exception e) { log.atError().setCause(e).setMessage("Caught exception while setting up the server and rethrowing").log(); throw e; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java new file mode 100644 index 000000000..6a9443eab --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java @@ -0,0 +1,18 @@ +package org.opensearch.migrations.trafficcapture.proxyserver; + +import io.opentelemetry.api.OpenTelemetry; +import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.IRootKafkaOffloaderContext; +import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.KafkaRecordContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; + +public class RootCaptureContext extends RootWireLoggingContext implements IRootKafkaOffloaderContext { + public RootCaptureContext(OpenTelemetry capture) { + super(capture); + } + + @Override + public KafkaRecordContext.MetricInstruments getKafkaOffloadingInstruments() { + var meter = getMeterProvider().get("captureProxy"); + return new KafkaRecordContext.MetricInstruments(meter); + } +} From ccc0e2a6ca2149b5418dc6c59725600722b3e082 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 13 Jan 2024 23:36:29 -0500 Subject: [PATCH 59/94] Bugfixes and test fixes to get all of the unit tests to pass. 
Signed-off-by: Greg Schohn --- .../KafkaCaptureFactoryTest.java | 2 +- .../TestRootKafkaOffloaderContext.java | 16 ++-- ...eamChannelConnectionCaptureSerializer.java | 2 +- .../tracing/IInstrumentConstructor.java | 5 +- .../tracing/IInstrumentationAttributes.java | 6 +- .../migrations/tracing/RootOtelContext.java | 4 +- .../netty/LoggingHttpHandler.java | 1 - .../netty/tracing/WireCaptureContexts.java | 1 + ...edTrafficToHttpTransactionAccumulator.java | 3 - .../replay/RequestResponsePacketPair.java | 6 +- .../NettyPacketToHttpConsumer.java | 4 +- .../http/HttpJsonTransformingConsumer.java | 2 +- .../datatypes/ISourceTrafficChannelKey.java | 14 +-- .../replay/datatypes/ITrafficStreamKey.java | 4 + .../datatypes/PojoTrafficStreamKey.java | 11 +-- .../PojoTrafficStreamKeyAndContext.java | 10 ++- .../replay/tracing/IReplayContexts.java | 29 +++--- .../replay/tracing/IRootReplayerContext.java | 3 +- .../replay/tracing/ReplayContexts.java | 23 +++-- .../replay/tracing/RootReplayerContext.java | 7 +- .../traffic/source/InputStreamOfTraffic.java | 2 +- .../replay/BlockingTrafficSourceTest.java | 5 +- ...xpiringTrafficStreamMapSequentialTest.java | 6 +- ...ExpiringTrafficStreamMapUnorderedTest.java | 6 +- .../replay/FullTrafficReplayerTest.java | 41 +++++---- .../replay/ParsedHttpMessagesAsDictsTest.java | 4 +- .../replay/ResultsToLogsConsumerTest.java | 8 +- ...afficToHttpTransactionAccumulatorTest.java | 10 +-- .../replay/tracing/TracingTest.java | 89 +++++++++++++++++++ .../migrations/replay/TestRequestKey.java | 17 ++-- .../TestTrafficStreamsLifecycleContext.java | 15 ---- .../migrations/replay/TestUtils.java | 5 +- .../migrations/tracing/TestContext.java | 8 ++ 33 files changed, 232 insertions(+), 137 deletions(-) create mode 100644 TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java delete mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 7a8a899fb..295372b8d 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -76,7 +76,7 @@ public void testLargeRequestIsWithinKafkaMessageSizeLimit() throws IOException, } private static ConnectionContext createCtx() { - return new ConnectionContext(new TestRootKafkaOffloaderContext(null), "test", "test"); + return new ConnectionContext(new TestRootKafkaOffloaderContext(), "test", "test"); } /** diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java index bab196e0b..b5382ce2f 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java +++ 
b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java @@ -1,11 +1,11 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing; -import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; import lombok.Getter; +import lombok.NonNull; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; import org.opensearch.migrations.tracing.RootOtelContext; @@ -26,19 +26,19 @@ public static TestRootKafkaOffloaderContext withTracking() { } public static TestRootKafkaOffloaderContext noTracking() { - return new TestRootKafkaOffloaderContext(new InMemoryInstrumentationBundle(null, null)); + return new TestRootKafkaOffloaderContext(); + } + + public TestRootKafkaOffloaderContext() { + this(new InMemoryInstrumentationBundle(null, null)); } public TestRootKafkaOffloaderContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) { - super("tests", inMemoryInstrumentationBundle.openTelemetrySdk); + super("tests", inMemoryInstrumentationBundle == null ? null : + inMemoryInstrumentationBundle.openTelemetrySdk); this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; final var meter = getMeterProvider().get("test"); this.kafkaOffloadingInstruments = new KafkaRecordContext.MetricInstruments(meter); this.connectionInstruments = new ConnectionContext.MetricInstruments(meter); } - - @Override - public Span buildSpan(IInstrumentationAttributes enclosingScope, String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { - return null; - } } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java index b6fc61a46..5a04e239d 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java @@ -246,7 +246,7 @@ public void addDeregisterEvent(Instant timestamp) throws IOException { } private void addStringMessage(int captureFieldNumber, int dataFieldNumber, - Instant timestamp, String str) throws IOException { + Instant timestamp, @NonNull String str) throws IOException { int dataSize = 0; int lengthSize = 1; if (str.length() > 0) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index 5e979f046..571c70465 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -2,8 +2,9 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; +import lombok.NonNull; public interface IInstrumentConstructor { - Span buildSpan(IInstrumentationAttributes enclosingScope, String 
spanName, Span linkedSpan, - AttributesBuilder attributesBuilder); + @NonNull Span buildSpan(IInstrumentationAttributes enclosingScope, String spanName, Span linkedSpan, + AttributesBuilder attributesBuilder); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index b9cfbc2c0..9a3e427c4 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -6,6 +6,7 @@ import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; +import lombok.NonNull; import java.util.ArrayList; @@ -22,9 +23,10 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { Exception getObservedExceptionToIncludeInMetrics(); void setObservedExceptionToIncludeInMetrics(Exception e); - default Attributes getPopulatedMetricAttributes() { + default @NonNull Attributes getPopulatedMetricAttributes() { final var e = getObservedExceptionToIncludeInMetrics(); - return e == null ? null : Attributes.builder().put(HAD_EXCEPTION_KEY, true).build(); + var b = Attributes.builder(); + return e == null ? b.build() : b.put(HAD_EXCEPTION_KEY, true).build(); } default Attributes getPopulatedSpanAttributes() { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 4d5b05a55..5df850ac1 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -126,8 +126,8 @@ private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, S } @Override - public Span buildSpan(IInstrumentationAttributes enclosingScope, - String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { + public @NonNull Span buildSpan(IInstrumentationAttributes enclosingScope, + String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { var parentSpan = enclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); return buildSpanWithParent(spanBuilder, getPopulatedSpanAttributes(attributesBuilder), parentSpan, linkedSpan); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index d788e2a3f..ffb1f3ddb 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -200,7 +200,6 @@ public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { protected void channelFinishedReadingAnHttpMessage(ChannelHandlerContext ctx, Object msg, boolean shouldCapture, HttpRequest httpRequest) throws Exception { - assert messageContext instanceof IWireCaptureContexts.IRequestContext; messageContext = 
messageContext.createWaitingForResponseContext(); super.channelRead(ctx, msg); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java index a873b77ff..2cd7a6dbc 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -73,6 +73,7 @@ protected HttpMessageContext(RootWireLoggingContext rootWireLoggingContext, ICon long sourceRequestIndex) { super(rootWireLoggingContext, enclosingScope); this.sourceRequestIndex = sourceRequestIndex; + initializeSpan(); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index a8a11ccab..09c29baa6 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -9,12 +9,9 @@ import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index 303d53efc..d457d4e28 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -6,8 +6,6 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; -import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; @@ -43,7 +41,7 @@ public RequestResponsePacketPair(@NonNull ITrafficStreamKey startingAtTrafficStr 
startingSourceRequestIndex, indexOfCurrentRequest); var httpTransactionContext = startingAtTrafficStreamKey.getTrafficStreamsContext() .createHttpTransactionContext(requestKey, sourceTimestamp); - requestOrResponseAccumulationContext = new ReplayContexts.RequestAccumulationContext(httpTransactionContext); + requestOrResponseAccumulationContext = httpTransactionContext.createRequestAccumulationContext(); } @NonNull ISourceTrafficChannelKey getBeginningTrafficStreamKey() { @@ -75,7 +73,7 @@ public void rotateRequestGatheringToResponse() { var looseCtx = requestOrResponseAccumulationContext; assert looseCtx instanceof IReplayContexts.IRequestAccumulationContext; requestOrResponseAccumulationContext = - getRequestContext().getLogicalEnclosingScope().createAccumulatorContext(); + getRequestContext().getLogicalEnclosingScope().createResponseAccumulationContext(); } public void addRequestData(Instant packetTimeStamp, byte[] data) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 5d9adae03..4a4732773 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -62,7 +62,7 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer currentRequestContextUnion; public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri, SslContext sslContext, - ReplayContexts.HttpTransactionContext httpTransactionContext) { + IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { this(createClientConnection(eventLoopGroup, sslContext, serverUri, httpTransactionContext.getLogicalEnclosingScope()), httpTransactionContext); } @@ -70,7 +70,7 @@ public NettyPacketToHttpConsumer(NioEventLoopGroup eventLoopGroup, URI serverUri public NettyPacketToHttpConsumer(ChannelFuture clientConnection, IReplayContexts.IReplayerHttpTransactionContext ctx) { var parentContext = ctx.createTargetRequestContext(); - this.setCurrentRequestContext(new ReplayContexts.RequestSendingContext(parentContext)); + this.setCurrentRequestContext(parentContext.createHttpSendingContext()); responseBuilder = AggregatedRawResponse.builder(Instant.now()); DiagnosticTrackableCompletableFuture initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 8fe3b298a..ca1918e7d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -51,7 +51,7 @@ public class HttpJsonTransformingConsumer implements IPacketFinalizingConsume private final RequestPipelineOrchestrator pipelineOrchestrator; private final EmbeddedChannel channel; private static final MetricsLogger metricsLogger = new MetricsLogger("HttpJsonTransformingConsumer"); - private 
ReplayContexts.RequestTransformationContext transformationContext; + private IReplayContexts.IRequestTransformationContext transformationContext; /** * Roughly try to keep track of how big each data chunk was that came into the transformer. These values diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java index 4648bceb9..252c00fd0 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java @@ -1,12 +1,16 @@ package org.opensearch.migrations.replay.datatypes; -import lombok.NonNull; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; -import org.opensearch.migrations.tracing.IInstrumentConstructor; +import lombok.AllArgsConstructor; +import lombok.Getter; public interface ISourceTrafficChannelKey { String getNodeId(); String getConnectionId(); - @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); + + @Getter + @AllArgsConstructor + class PojoImpl implements ISourceTrafficChannelKey { + String nodeId; + String connectionId; + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java index ab9d6ced4..618b37246 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java @@ -1,5 +1,9 @@ package org.opensearch.migrations.replay.datatypes; +import lombok.NonNull; +import org.opensearch.migrations.replay.tracing.IReplayContexts; + public interface ITrafficStreamKey extends ISourceTrafficChannelKey { int getTrafficStreamIndex(); + @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java index 77ecd33ad..02392ac21 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java @@ -4,16 +4,17 @@ import org.opensearch.migrations.replay.util.TrafficChannelKeyFormatter; @EqualsAndHashCode() -public abstract class PojoTrafficStreamKey implements ITrafficStreamKey { - protected final String nodeId; - protected final String connectionId; +public abstract class PojoTrafficStreamKey extends ISourceTrafficChannelKey.PojoImpl + implements ITrafficStreamKey { protected final int trafficStreamIndex; protected PojoTrafficStreamKey(String nodeId, String connectionId, int index) { - this.nodeId = nodeId; - this.connectionId = connectionId; + super(nodeId, connectionId); this.trafficStreamIndex = index; } + protected PojoTrafficStreamKey(PojoImpl tsk, int index) { + this(tsk.nodeId, tsk.connectionId, index); + 
} @Override public String getNodeId() { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java index 887ff16eb..279dcab82 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java @@ -25,8 +25,10 @@ public class PojoTrafficStreamKeyAndContext extends PojoTrafficStreamKey { return rval; } - protected PojoTrafficStreamKeyAndContext(TrafficStream stream) { - this(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); + public static PojoTrafficStreamKeyAndContext + build(ISourceTrafficChannelKey sourceKey, int index, + Function contextSupplier) { + return build(sourceKey.getNodeId(), sourceKey.getConnectionId(), index, contextSupplier); } public static PojoTrafficStreamKeyAndContext @@ -37,6 +39,10 @@ protected PojoTrafficStreamKeyAndContext(TrafficStream stream) { return rval; } + protected PojoTrafficStreamKeyAndContext(TrafficStream stream) { + this(stream.getNodeId(), stream.getConnectionId(), TrafficStreamUtils.getTrafficStreamIndex(stream)); + } + private PojoTrafficStreamKeyAndContext(String nodeId, String connectionId, int index) { super(nodeId, connectionId, index); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 0c5010de5..e52578066 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -7,8 +7,6 @@ import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext; import java.time.Instant; @@ -65,7 +63,7 @@ public interface IAccumulationScope extends IScopedInstrumentationAttributes { public interface IChannelKeyContext extends IAccumulationScope, - IConnectionContext { + org.opensearch.migrations.tracing.commoncontexts.IConnectionContext { String ACTIVITY_NAME = ActivityNames.CHANNEL; @Override default String getActivityName() { return ACTIVITY_NAME;} @@ -108,9 +106,6 @@ public interface ITrafficStreamsLifecycleContext IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TRAFFIC_STREAM_LIFETIME; - ReplayContexts.HttpTransactionContext createHttpTransactionContext(UniqueReplayerRequestKey requestKey, - Instant sourceTimestamp); - @Override default String getActivityName() { return ACTIVITY_NAME;} ITrafficStreamKey getTrafficStreamKey(); IChannelKeyContext getChannelKeyContext(); @@ -120,16 +115,17 @@ default String getConnectionId() { default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } + + IReplayerHttpTransactionContext createHttpTransactionContext(UniqueReplayerRequestKey requestKey, + Instant 
sourceTimestamp); } public interface IReplayerHttpTransactionContext - extends IHttpTransactionContext, + extends org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext, IAccumulationScope, IWithTypedEnclosingScope { AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); - ITupleHandlingContext createTupleContext(); - String ACTIVITY_NAME = ActivityNames.HTTP_TRANSACTION; @Override default String getActivityName() { return ACTIVITY_NAME;} @@ -154,17 +150,16 @@ default long replayerRequestIndex() { @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IHttpTransactionContext.super.fillAttributes( + return org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext.super.fillAttributes( builder.put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex())); } - ReplayContexts.RequestTransformationContext createTransformationContext(); - - IScopedInstrumentationAttributes createAccumulatorContext(); - - ReplayContexts.TargetRequestContext createTargetRequestContext(); - + IRequestAccumulationContext createRequestAccumulationContext(); + IResponseAccumulationContext createResponseAccumulationContext(); + IRequestTransformationContext createTransformationContext(); IScheduledContext createScheduledContext(Instant timestamp); + ITargetRequestContext createTargetRequestContext(); + ITupleHandlingContext createTupleContext(); } public interface IRequestAccumulationContext @@ -231,8 +226,8 @@ public interface ITargetRequestContext void onBytesSent(int size); void onBytesReceived(int size); + IRequestSendingContext createHttpSendingContext(); IReceivingHttpResponseContext createHttpReceivingContext(); - IWaitingForHttpResponseContext createWaitingForResponseContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java index 3698c65b9..ec5301248 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.replay.tracing; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; @@ -7,5 +8,5 @@ public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { ITrafficSourceContexts.IReadChunkContext createReadChunkContext(); - IReplayContexts.IChannelKeyContext createChannelContext(ITrafficStreamKey tsk); + IReplayContexts.IChannelKeyContext createChannelContext(ISourceTrafficChannelKey tsk); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index dd13c833a..edb0ff439 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -13,7 +13,6 @@ import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import 
org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.time.Duration; import java.time.Instant; @@ -109,19 +108,19 @@ public String getRecordId() { @Override public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficLifecyleContext(ITrafficStreamKey tsk) { - return new ReplayContexts.TrafficStreamsLifecycleContext(this.getRootInstrumentationScope(), this, tsk + return new TrafficStreamLifecycleContext(this.getRootInstrumentationScope(), this, tsk ); } } - public static class TrafficStreamsLifecycleContext + public static class TrafficStreamLifecycleContext extends BaseNestedSpanContext implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; - protected TrafficStreamsLifecycleContext(RootReplayerContext rootScope, - IInstrumentationAttributes enclosingScope, - ITrafficStreamKey trafficStreamKey) { + protected TrafficStreamLifecycleContext(RootReplayerContext rootScope, + IInstrumentationAttributes enclosingScope, + ITrafficStreamKey trafficStreamKey) { super(rootScope, enclosingScope); this.trafficStreamKey = trafficStreamKey; initializeSpan(); @@ -209,11 +208,14 @@ public RequestTransformationContext createTransformationContext() { return new ReplayContexts.RequestTransformationContext(this); } + public IReplayContexts.IRequestAccumulationContext createRequestAccumulationContext() { + return new ReplayContexts.RequestAccumulationContext(this); + } + @Override - public IScopedInstrumentationAttributes createAccumulatorContext() { + public IReplayContexts.IResponseAccumulationContext createResponseAccumulationContext() { return new ReplayContexts.ResponseAccumulationContext(this); } - @Override public TargetRequestContext createTargetRequestContext() { return new ReplayContexts.TargetRequestContext(this); @@ -463,6 +465,11 @@ public void onBytesReceived(int size) { meterIncrementEvent(getMetrics().bytesRead, size); } + @Override + public IRequestSendingContext createHttpSendingContext() { + return new ReplayContexts.RequestSendingContext(this); + } + @Override public IReplayContexts.IReceivingHttpResponseContext createHttpReceivingContext() { return new ReplayContexts.ReceivingHttpResponseContext(this); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index db1eae243..4d2306460 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -1,6 +1,7 @@ package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.OpenTelemetry; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.tracing.RootOtelContext; @@ -23,7 +24,7 @@ public class RootReplayerContext extends RootOtelContext implements IRootReplaye public final ReplayContexts.ChannelKeyContext.MetricInstruments channelKeyInstruments; public final ReplayContexts.KafkaRecordContext.MetricInstruments kafkaRecordInstruments; - public final 
ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments trafficStreamLifecycleInstruments; + public final ReplayContexts.TrafficStreamLifecycleContext.MetricInstruments trafficStreamLifecycleInstruments; public final ReplayContexts.HttpTransactionContext.MetricInstruments httpTransactionInstruments; public final ReplayContexts.RequestAccumulationContext.MetricInstruments requestAccumInstruments; public final ReplayContexts.ResponseAccumulationContext.MetricInstruments responseAccumInstruments; @@ -51,7 +52,7 @@ public RootReplayerContext(OpenTelemetry sdk) { channelKeyInstruments = new ReplayContexts.ChannelKeyContext.MetricInstruments(meter); kafkaRecordInstruments = new ReplayContexts.KafkaRecordContext.MetricInstruments(meter); - trafficStreamLifecycleInstruments = new ReplayContexts.TrafficStreamsLifecycleContext.MetricInstruments(meter); + trafficStreamLifecycleInstruments = new ReplayContexts.TrafficStreamLifecycleContext.MetricInstruments(meter); httpTransactionInstruments = new ReplayContexts.HttpTransactionContext.MetricInstruments(meter); requestAccumInstruments = new ReplayContexts.RequestAccumulationContext.MetricInstruments(meter); responseAccumInstruments = new ReplayContexts.ResponseAccumulationContext.MetricInstruments(meter); @@ -69,7 +70,7 @@ public TrafficSourceContexts.ReadChunkContext createReadChunkContext() { return new TrafficSourceContexts.ReadChunkContext(this, this); } - public IReplayContexts.IChannelKeyContext createChannelContext(ITrafficStreamKey tsk) { + public IReplayContexts.IChannelKeyContext createChannelContext(ISourceTrafficChannelKey tsk) { return new ReplayContexts.ChannelKeyContext(this, this, tsk); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index d9e1baa02..2fff58b4d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -31,7 +31,7 @@ public InputStreamOfTraffic(RootReplayerContext context, InputStream inputStream this.inputStream = inputStream; } - public static final class IOSTrafficStreamContext extends ReplayContexts.TrafficStreamsLifecycleContext { + public static final class IOSTrafficStreamContext extends ReplayContexts.TrafficStreamLifecycleContext { public IOSTrafficStreamContext(RootReplayerContext rootReplayerContext, IReplayContexts.IChannelKeyContext enclosingScope, ITrafficStreamKey trafficStreamKey) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java index 57ac36964..a98d27462 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/BlockingTrafficSourceTest.java @@ -12,8 +12,6 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import 
org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; @@ -107,8 +105,7 @@ private static class TestTrafficCaptureSource implements ISimpleTrafficCaptureSo .setClose(CloseObservation.getDefaultInstance()) .build()) .build(); - var key = PojoTrafficStreamKeyAndContext.build(ts, - tsk->new TestTrafficStreamsLifecycleContext(rootContext, tsk)); + var key = PojoTrafficStreamKeyAndContext.build(ts, rootContext::createTrafficStreamContextForTest); return CompletableFuture.completedFuture(List.of(new PojoTrafficStreamAndKey(ts, key))); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java index a078ae8c8..0c31bcd1c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java @@ -36,15 +36,15 @@ public void onExpireAccumulation(String partitionId, for (int i=0; inew TestTrafficStreamsLifecycleContext(context, k)); + context::createTrafficStreamContextForTest); var accumulation = expiringMap.getOrCreateWithoutExpiration(tsk, k->new Accumulation(tsk, 0)); createdAccumulations.add(accumulation); expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, - connectionGenerator.apply(i), 0, k->new TestTrafficStreamsLifecycleContext(context, k)), + connectionGenerator.apply(i), 0, context::createTrafficStreamContextForTest), accumulation, ts); var rrPair = createdAccumulations.get(i).getOrCreateTransactionPair( PojoTrafficStreamKeyAndContext.build("n","c",1, - k->new TestTrafficStreamsLifecycleContext(context, k)), Instant.EPOCH); + context::createTrafficStreamContextForTest), Instant.EPOCH); rrPair.addResponseData(ts, ("Add"+i).getBytes(StandardCharsets.UTF_8)); expiredCountsPerLoop.add(expiredAccumulations.size()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java index c7508382b..9a6cf584f 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java @@ -39,16 +39,16 @@ public void onExpireAccumulation(String partitionId, for (int i=0; inew TestTrafficStreamsLifecycleContext(context, k)); + context::createTrafficStreamContextForTest); var accumulation = expiringMap.getOrCreateWithoutExpiration(tsk, k->new Accumulation(tsk, 0)); expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, connectionGenerator.apply(i), 0, - k->new TestTrafficStreamsLifecycleContext(context, k)), + context::createTrafficStreamContextForTest), accumulation, ts); createdAccumulations.add(accumulation); if (accumulation != null) { var rrPair = accumulation.getOrCreateTransactionPair(PojoTrafficStreamKeyAndContext.build("n","c",1, - k->new 
TestTrafficStreamsLifecycleContext(context, k)), Instant.EPOCH); + context::createTrafficStreamContextForTest), Instant.EPOCH); rrPair.addResponseData(ts, ("Add" + i).getBytes(StandardCharsets.UTF_8)); } expiredCountsPerLoop.add(expiredAccumulations.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index a192077ed..75ed95f8d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -19,13 +19,11 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; @@ -49,10 +47,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Consumer; -import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; -import java.util.stream.Stream; @Slf4j // It would be great to test with leak detection here, but right now this test relies upon TrafficReplayer.shutdown() @@ -97,7 +93,8 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { .addSubStream(TrafficObservation.newBuilder() .setClose(CloseObservation.newBuilder().build()).build()) .build(); - var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(List.of(trafficStreamWithJustClose)); + var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(TestContext.noTracking(), + List.of(trafficStreamWithJustClose)); TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(0, httpServer.localhostEndpoint(), new IndexWatchingListenerFactory(), trafficSourceSupplier); Assertions.assertEquals(1, trafficSourceSupplier.nextReadCursor.get()); @@ -137,9 +134,10 @@ public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) thro var trafficStream = tsb.addSubStream(TrafficObservation.newBuilder().setTs(fixedTimestamp) .setClose(CloseObservation.getDefaultInstance())) .build(); - var trafficSource = - new ArrayCursorTrafficCaptureSource(new ArrayCursorTrafficSourceFactory(List.of(trafficStream))); var trackingContext = TestContext.withTracking(); + var trafficSource = + new ArrayCursorTrafficCaptureSource(trackingContext, + new ArrayCursorTrafficSourceFactory(trackingContext, List.of(trafficStream))); var tr = new TrafficReplayer(trackingContext, httpServer.localhostEndpoint(), null, new StaticAuthTransformerFactory("TEST"), null, true, 10, 10*1024); @@ -168,17 +166,16 @@ public void 
testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) thro */ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSpanExporter, int numRequests) { var byName = testSpanExporter.getFinishedSpanItems().stream().collect(Collectors.groupingBy(SpanData::getName)); - BiConsumer chk = (i, k)-> { + BiConsumer chk = (i, k) -> { Assertions.assertNotNull(byName.get(k)); Assertions.assertEquals(i, byName.get(k).size()); byName.remove(k); }; chk.accept(1,"channel"); - chk.accept(1, "testTrafficSpan"); - + chk.accept(1, "trafficStreamLifetime"); + chk.accept(numRequests, "httpTransaction"); chk.accept(numRequests, "accumulatingRequest"); chk.accept(numRequests, "accumulatingResponse"); - chk.accept(numRequests, "httpTransaction"); chk.accept(numRequests, "transformation"); chk.accept(numRequests, "targetTransaction"); chk.accept(numRequests*2, "scheduled"); @@ -218,7 +215,8 @@ public void fullTest(int testSize, boolean randomize) throws Throwable { var trafficStreams = streamAndConsumer.stream.collect(Collectors.toList()); log.atInfo().setMessage(()->trafficStreams.stream().map(ts->TrafficStreamUtils.summarizeTrafficStream(ts)) .collect(Collectors.joining("\n"))).log(); - var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(trafficStreams); + var rootContext = TestContext.noTracking(); + var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(rootContext, trafficStreams); TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(numExpectedRequests, httpServer.localhostEndpoint(), new IndexWatchingListenerFactory(), trafficSourceSupplier); Assertions.assertEquals(trafficSourceSupplier.trafficStreamsList.size(), trafficSourceSupplier.nextReadCursor.get()); @@ -236,13 +234,13 @@ private static class TrafficStreamCursorKey implements ITrafficStreamKey, Compar public final int trafficStreamIndex; @Getter public final IReplayContexts.ITrafficStreamsLifecycleContext trafficStreamsContext; - public TrafficStreamCursorKey(RootReplayerContext ctx, TrafficStream stream, int arrayIndex) { + public TrafficStreamCursorKey(TestContext context, TrafficStream stream, int arrayIndex) { connectionId = stream.getConnectionId(); nodeId = stream.getNodeId(); trafficStreamIndex = TrafficStreamUtils.getTrafficStreamIndex(stream); this.arrayIndex = arrayIndex; - var key = PojoTrafficStreamKeyAndContext.build(nodeId, connectionId, trafficStreamIndex, tsk-> - new TestTrafficStreamsLifecycleContext(ctx, tsk)); + var key = PojoTrafficStreamKeyAndContext.build(nodeId, connectionId, trafficStreamIndex, + context::createTrafficStreamContextForTest); trafficStreamsContext = key.getTrafficStreamsContext(); key.setTrafficStreamsContext(trafficStreamsContext); } @@ -254,15 +252,17 @@ public int compareTo(TrafficStreamCursorKey other) { } private static class ArrayCursorTrafficSourceFactory implements Supplier { + private final TestContext rootContext; List trafficStreamsList; AtomicInteger nextReadCursor = new AtomicInteger(); - public ArrayCursorTrafficSourceFactory(List trafficStreamsList) { + public ArrayCursorTrafficSourceFactory(TestContext rootContext, List trafficStreamsList) { + this.rootContext = rootContext; this.trafficStreamsList = trafficStreamsList; } public ISimpleTrafficCaptureSource get() { - var rval = new ArrayCursorTrafficCaptureSource(this); + var rval = new ArrayCursorTrafficCaptureSource(rootContext, this); log.info("trafficSource="+rval+" readCursor="+rval.readCursor.get()+" nextReadCursor="+ nextReadCursor.get()); return rval; } @@ -275,13 +275,14 @@ private 
static class ArrayCursorTrafficCaptureSource implements ISimpleTrafficCa ArrayCursorTrafficSourceFactory arrayCursorTrafficSourceFactory; TestContext rootContext; - public ArrayCursorTrafficCaptureSource(ArrayCursorTrafficSourceFactory arrayCursorTrafficSourceFactory) { + public ArrayCursorTrafficCaptureSource(TestContext rootContext, + ArrayCursorTrafficSourceFactory arrayCursorTrafficSourceFactory) { var startingCursor = arrayCursorTrafficSourceFactory.nextReadCursor.get(); log.info("startingCursor = " + startingCursor); this.readCursor = new AtomicInteger(startingCursor); this.arrayCursorTrafficSourceFactory = arrayCursorTrafficSourceFactory; cursorHighWatermark = startingCursor; - rootContext = TestContext.noTracking(); + this.rootContext = rootContext; } @Override @@ -321,6 +322,8 @@ public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) { log.info("Commit called for "+trafficStreamKey+", but topCursor="+topCursor); } } + rootContext.channelContextManager.releaseContextFor( + ((TrafficStreamCursorKey) trafficStreamKey).trafficStreamsContext.getChannelKeyContext()); return CommitResult.Immediate; } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index 9331b7cbc..9290be8e6 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -12,9 +12,11 @@ class ParsedHttpMessagesAsDictsTest { + static TestContext rootContext = TestContext.noTracking(); + private static final PojoTrafficStreamKeyAndContext TEST_TRAFFIC_STREAM_KEY = PojoTrafficStreamKeyAndContext.build("N","C",1, - k->new TestTrafficStreamsLifecycleContext(TestContext.noTracking(), k)); + k->rootContext.createTrafficStreamContextForStreamSource(rootContext.createChannelContext(k), k)); ParsedHttpMessagesAsDicts makeTestData() { return makeTestData(null, null); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 396da06b4..80d29ef02 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -80,7 +80,7 @@ public void testOutputterWithNulls() throws IOException { var context = TestContext.noTracking(); var emptyTuple = new SourceTargetCaptureTuple( new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - k->new TestTrafficStreamsLifecycleContext(context, k)), 0, 0), + context::createTrafficStreamContextForTest), 0, 0), null, null, null, null, null, null); try (var closeableLogSetup = new CloseableLogSetup()) { var consumer = new TupleParserChainConsumer(null, new ResultsToLogsConsumer()); @@ -98,7 +98,7 @@ public void testOutputterWithException() throws IOException { var exception = new Exception(TEST_EXCEPTION_MESSAGE); var emptyTuple = new SourceTargetCaptureTuple( new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - k->new TestTrafficStreamsLifecycleContext(context, k)), 0, 0), + 
context::createTrafficStreamContextForTest), 0, 0), null, null, null, null, exception, null); try (var closeableLogSetup = new CloseableLogSetup()) { @@ -235,9 +235,7 @@ public void testOutputterForPost() throws IOException { private void testOutputterForRequest(String requestResourceName, String expected) throws IOException { var context = TestContext.noTracking(); var trafficStreamKey = PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - k->new TestTrafficStreamsLifecycleContext(context, k)); - var requestCtx = TestRequestKey.getTestConnectionRequestContext(context, 0); - trafficStreamKey.setTrafficStreamsContext(requestCtx.getImmediateEnclosingScope()); + context::createTrafficStreamContextForTest); var sourcePair = new RequestResponsePacketPair(trafficStreamKey, Instant.EPOCH, 0, 0); var rawRequestData = loadResourceAsBytes("/requests/raw/" + requestResourceName); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 4df76b056..465e2ece2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -13,19 +13,15 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.time.Duration; @@ -126,7 +122,7 @@ static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, List directives, TestContext rootContext) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); var tsk = PojoTrafficStreamKeyAndContext.build("n", "test", uniqueIdCounter.incrementAndGet(), - k->rootContext.createChannelContext(k).getChannelKey().getTrafficStreamsContext()); + rootContext::createTrafficStreamContextForTest); var offloader = connectionFactory.createOffloader(TestContext.noTracking().createChannelContext(tsk)); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); @@ -218,7 +214,7 @@ void generateAndTest(String testName, int bufferSize, int skipCount, * @return */ static SortedSet - accumulateTrafficStreamsWithNewAccumulator(RootReplayerContext 
context, + accumulateTrafficStreamsWithNewAccumulator(TestContext context, Stream trafficStreams, List aggregations, AtomicInteger requestsReceived) { @@ -277,7 +273,7 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel ; trafficStreams.forEach(ts->trafficAccumulator.accept( new PojoTrafficStreamAndKey(ts, PojoTrafficStreamKeyAndContext.build(ts, - k->new TestTrafficStreamsLifecycleContext(context, k)) + context::createTrafficStreamContextForTest) ))); trafficAccumulator.close(); return tsIndicesReceived; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java new file mode 100644 index 000000000..93c751fc0 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -0,0 +1,89 @@ +package org.opensearch.migrations.replay.tracing; + +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.trace.data.SpanData; +import lombok.Getter; +import lombok.Lombok; +import lombok.Setter; +import lombok.SneakyThrows; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.TestContext; + +import java.time.Duration; +import java.time.Instant; +import java.util.Arrays; +import java.util.List; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class TracingTest { + @Test + public void tracingWorks() { + TestContext rootContext = TestContext.withTracking(); + var tssk = new ISourceTrafficChannelKey.PojoImpl("n", "c"); + try (var channelCtx = rootContext.createChannelContext(tssk); + var kafkaRecordCtx = + rootContext.createTrafficStreamContextForKafkaSource(channelCtx, "testRecordId", 127)) { + var tsk = PojoTrafficStreamKeyAndContext.build(tssk, 1, kafkaRecordCtx::createTrafficLifecyleContext); + try (var tskCtx = tsk.getTrafficStreamsContext()) { // made in the callback of the previous call + var urk = new UniqueReplayerRequestKey(tsk, 1, 0); + try (var httpCtx = tskCtx.createHttpTransactionContext(urk, Instant.EPOCH)) { + try (var ctx = httpCtx.createRequestAccumulationContext()) { + } + try (var ctx = httpCtx.createResponseAccumulationContext()) { + } + try (var ctx = httpCtx.createTransformationContext()) { + } + try (var ctx = httpCtx.createScheduledContext(Instant.now().plus(Duration.ofSeconds(1)))) { + } + try (var targetRequestCtx = httpCtx.createTargetRequestContext()) { + try (var ctx = targetRequestCtx.createHttpSendingContext()) { + } + try (var ctx = targetRequestCtx.createWaitingForResponseContext()) { + } + try (var ctx = targetRequestCtx.createHttpReceivingContext()) { + } + } + try (var ctx = httpCtx.createTupleContext()) { + } + } + } + } + + var recordedSpans = rootContext.inMemoryInstrumentationBundle.testSpanExporter.getFinishedSpanItems(); + var recordedMetrics = rootContext.inMemoryInstrumentationBundle.testMetricExporter.getFinishedMetricItems(); + + checkSpans(recordedSpans); + checkMetrics(recordedMetrics); + } + + private void 
checkMetrics(List recordedMetrics) { + } + + private void checkSpans(List recordedSpans) { + var byName = recordedSpans.stream().collect(Collectors.groupingBy(SpanData::getName)); + var keys = Arrays.stream(IReplayContexts.ActivityNames.class.getFields()).map(f-> { + try { + return f.get(null); + } catch (Exception e) { + Lombok.sneakyThrow(e); + return null; + } + }).toArray(String[]::new); + Stream.of(keys).forEach(spanName -> { + Assertions.assertNotNull(byName.get(spanName)); + Assertions.assertEquals(1, byName.get(spanName).size()); + byName.remove(spanName); + }); + + Assertions.assertEquals("", byName.entrySet().stream() + .map(kvp->kvp.getKey()+":"+kvp.getValue()).collect(Collectors.joining())); + } +} diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java index ff770bfdc..59a271d90 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java @@ -2,11 +2,11 @@ import java.time.Instant; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.TestContext; public class TestRequestKey { @@ -15,18 +15,17 @@ public class TestRequestKey { private TestRequestKey() {} - public static final ReplayContexts.HttpTransactionContext - getTestConnectionRequestContext(RootReplayerContext ctx, int replayerIdx) { + public static final IReplayContexts.IReplayerHttpTransactionContext + getTestConnectionRequestContext(TestContext ctx, int replayerIdx) { return getTestConnectionRequestContext(ctx, DEFAULT_TEST_CONNECTION, replayerIdx); } - public static ReplayContexts.HttpTransactionContext - getTestConnectionRequestContext(RootReplayerContext ctx, String connectionId, int replayerIdx) { + public static IReplayContexts.IReplayerHttpTransactionContext + getTestConnectionRequestContext(TestContext ctx, String connectionId, int replayerIdx) { var rk = new UniqueReplayerRequestKey( PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, - tsk -> new TestTrafficStreamsLifecycleContext(ctx, tsk)), + ctx::createTrafficStreamContextForTest), 0, replayerIdx); - return new ReplayContexts.HttpTransactionContext(ctx, rk.trafficStreamKey.getTrafficStreamsContext(), - rk, Instant.EPOCH); + return rk.trafficStreamKey.getTrafficStreamsContext().createHttpTransactionContext(rk, Instant.EPOCH); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java deleted file mode 100644 index ac6e0531f..000000000 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestTrafficStreamsLifecycleContext.java +++ /dev/null @@ -1,15 +0,0 @@ -package org.opensearch.migrations.replay; - -import 
org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.ReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; - -class TestTrafficStreamsLifecycleContext extends ReplayContexts.TrafficStreamsLifecycleContext { - private final ITrafficStreamKey trafficStreamKey; - - public TestTrafficStreamsLifecycleContext(RootReplayerContext rootContext, ITrafficStreamKey tsk) { - super(rootContext, new ReplayContexts.ChannelKeyContext(rootContext, rootContext, tsk), tsk); - this.trafficStreamKey = tsk; - initializeSpan(); - } -} diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index b6f56e1c5..6541887fe 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -16,6 +16,7 @@ import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; @@ -122,7 +123,7 @@ private static String getStringFromContent(FullHttpRequest fullRequest) throws I return baos.toString(StandardCharsets.UTF_8); } } - static void runPipelineAndValidate(RootReplayerContext rootContext, + static void runPipelineAndValidate(TestContext rootContext, IAuthTransformerFactory authTransformer, String extraHeaders, List stringParts, @@ -133,7 +134,7 @@ static void runPipelineAndValidate(RootReplayerContext rootContext, authTransformer, extraHeaders, stringParts, expectedRequestHeaders, expectedOutputGenerator); } - static void runPipelineAndValidate(RootReplayerContext rootContext, + static void runPipelineAndValidate(TestContext rootContext, IJsonTransformer transformer, IAuthTransformerFactory authTransformer, String extraHeaders, diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index c1984ecf1..efd073f98 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -2,11 +2,15 @@ import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; public class TestContext extends RootReplayerContext { public final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; + public final ChannelContextManager channelContextManager = new ChannelContextManager(this); public static TestContext withTracking() { return new TestContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), @@ -21,4 +25,8 @@ public 
TestContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) super(inMemoryInstrumentationBundle.openTelemetrySdk); this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; } + + public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficStreamContextForTest(ITrafficStreamKey tsk) { + return createTrafficStreamContextForStreamSource(channelContextManager.retainOrCreateContext(tsk), tsk); + } } From 07444245dec9a88ec0afb6ae8a3406262cfceb55 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 15 Jan 2024 00:28:42 -0500 Subject: [PATCH 60/94] Bugfixes. Stop metering double events in a couple spots and fix a connection id naming bug that was causing FullReplayerTests to fail. Signed-off-by: Greg Schohn --- .../trafficcapture/tracing/ConnectionContext.java | 9 --------- .../tracing/commoncontexts/IConnectionContext.java | 4 ---- .../migrations/replay/ClientConnectionPool.java | 1 - .../replay/datahandlers/NettyPacketToHttpConsumer.java | 1 - .../migrations/replay/tracing/ReplayContexts.java | 9 --------- ...eCapturedTrafficToHttpTransactionAccumulatorTest.java | 4 ++-- 6 files changed, 2 insertions(+), 26 deletions(-) diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index fceb09a3d..8d410dc80 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -55,15 +55,6 @@ public MetricInstruments(Meter meter) { @Override public void sendMeterEventsForEnd() { - meterDeltaEvent(getMetrics().activeConnectionsCounter, 1); - } - - @Override - public void onConnectionCreated() { - meterDeltaEvent(getMetrics().activeConnectionsCounter, 1); - } - @Override - public void onConnectionClosed() { meterDeltaEvent(getMetrics().activeConnectionsCounter, -1); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 314b5a9e9..845b13f40 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -20,8 +20,4 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder.put(CONNECTION_ID_ATTR, getConnectionId()) .put(NODE_ID_ATTR, getNodeId()); } - - void onConnectionCreated(); - - void onConnectionClosed(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index ffe036611..b81a3bfe5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -177,7 +177,6 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte .thenAccept(cf-> { cf.channel().close() .addListener(closeFuture -> { - 
channelAndFutureWork.getChannelContext().onConnectionClosed(); if (closeFuture.isSuccess()) { channelClosedFuture.future.complete(channelAndFutureWork.getInnerChannelFuture().channel()); } else { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 4a4732773..b78b8fa4b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -126,7 +126,6 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup var rval = new DefaultChannelPromise(outboundChannelFuture.channel()); outboundChannelFuture.addListener((ChannelFutureListener) connectFuture -> { if (connectFuture.isSuccess()) { - channelKeyContext.onConnectionCreated(); var pipeline = connectFuture.channel().pipeline(); pipeline.removeFirst(); log.atTrace().setMessage(()-> channelKeyContext.getChannelKey() + diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index edb0ff439..0ec019566 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -53,15 +53,6 @@ public MetricInstruments(Meter meter) { public String toString() { return channelKey.toString(); } - - @Override - public void onConnectionCreated() { - meterDeltaEvent(getMetrics().activeChannelCounter, 1); - } - @Override - public void onConnectionClosed() { - meterDeltaEvent(getMetrics().activeChannelCounter, -1); - } } public static class KafkaRecordContext diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 465e2ece2..542bb513e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -121,8 +121,8 @@ static ByteBuf makeSequentialByteBuf(int offset, int size) { static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, AtomicInteger uniqueIdCounter, List directives, TestContext rootContext) throws Exception { var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); - var tsk = PojoTrafficStreamKeyAndContext.build("n", "test", uniqueIdCounter.incrementAndGet(), - rootContext::createTrafficStreamContextForTest); + var tsk = PojoTrafficStreamKeyAndContext.build("n", "test_"+uniqueIdCounter.incrementAndGet(), + 0, rootContext::createTrafficStreamContextForTest); var offloader = connectionFactory.createOffloader(TestContext.noTracking().createChannelContext(tsk)); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); From 35a91854be37a2dc4808291bb79ae6f929ec9069 
Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 16 Jan 2024 12:22:57 -0500 Subject: [PATCH 61/94] Upgrade otel libraries to 1.34.1 from 1.32 and add the enable_open_metrics flag to the prometheus exporter for the otel collector to support exemplars. Signed-off-by: Greg Schohn --- TrafficCapture/captureKafkaOffloader/build.gradle | 2 +- TrafficCapture/captureOffloader/build.gradle | 2 +- TrafficCapture/coreUtilities/build.gradle | 4 ++-- .../dockerSolution/src/main/docker/otel-collector-config.yaml | 3 +++ TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml | 3 ++- TrafficCapture/nettyWireLogging/build.gradle | 2 +- TrafficCapture/trafficCaptureProxyServer/build.gradle | 2 +- TrafficCapture/trafficReplayer/build.gradle | 4 ++-- 8 files changed, 13 insertions(+), 9 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index e3ec41298..b5a7c1ec8 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -9,7 +9,7 @@ repositories { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation project(':captureOffloader') implementation project(':coreUtilities') diff --git a/TrafficCapture/captureOffloader/build.gradle b/TrafficCapture/captureOffloader/build.gradle index d4b9848dd..f36862828 100644 --- a/TrafficCapture/captureOffloader/build.gradle +++ b/TrafficCapture/captureOffloader/build.gradle @@ -20,7 +20,7 @@ sourceSets { } } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") api group: 'io.netty', name: 'netty-buffer', version: '4.1.100.Final' implementation project(':captureProtobufs') diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index 0fd1e6171..bac6ec805 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -41,7 +41,7 @@ repositories { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation project(':captureProtobufs') @@ -68,7 +68,7 @@ dependencies { // OpenTelemetry log4j appender implementation("io.opentelemetry.instrumentation:opentelemetry-log4j-appender-2.17:1.30.0-alpha") - testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk-testing' } diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml index b32005459..c08cb8788 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml @@ -12,6 +12,9 @@ processors: exporters: prometheus: endpoint: "0.0.0.0:8889" + send_timestamps: true + metric_expiration: 5m + enable_open_metrics: true logging: loglevel: debug diff --git a/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml b/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml index ddea76205..028af3f5e 100644 
--- a/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml @@ -1,6 +1,7 @@ scrape_configs: - job_name: 'otel-collector' - scrape_interval: 2s + scrape_interval: 1s + honor_timestamps: true static_configs: - targets: ['otel-collector:8889'] - targets: ['otel-collector:8888'] diff --git a/TrafficCapture/nettyWireLogging/build.gradle b/TrafficCapture/nettyWireLogging/build.gradle index 523fe0078..24ff8217c 100644 --- a/TrafficCapture/nettyWireLogging/build.gradle +++ b/TrafficCapture/nettyWireLogging/build.gradle @@ -8,7 +8,7 @@ plugins { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation platform("io.netty:netty-bom:4.1.100.Final") implementation project(':captureOffloader') diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index 140fafcd1..455655fbd 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -13,7 +13,7 @@ configurations { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation 'org.opensearch.plugin:opensearch-security:2.11.1.0' implementation 'org.opensearch:opensearch-common:2.11.0' diff --git a/TrafficCapture/trafficReplayer/build.gradle b/TrafficCapture/trafficReplayer/build.gradle index fadad7229..b8797a3ba 100644 --- a/TrafficCapture/trafficReplayer/build.gradle +++ b/TrafficCapture/trafficReplayer/build.gradle @@ -35,7 +35,7 @@ repositories { dependencies { //spotbugs 'com.github.spotbugs:spotbugs:4.7.3' def resilience4jVersion = "1.7.0"; - implementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation project(':captureProtobufs') implementation project(':coreUtilities') @@ -70,7 +70,7 @@ dependencies { testFixturesImplementation testFixtures(project(path: ':coreUtilities')) testFixturesImplementation testFixtures(project(path: ':testUtilities')) - testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.32.0") + testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") testFixturesImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testFixturesImplementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.15.0' testFixturesImplementation group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' From 0e8379d90d79765f643d10bc0c0b38819a6b0457 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 16 Jan 2024 18:14:00 -0500 Subject: [PATCH 62/94] Fix a bug where the current scope's attributes weren't being added into its own span. 
I've also updated linked spans to allow for more than one (will test shortly) Signed-off-by: Greg Schohn --- .../java/org/opensearch/migrations/Utils.java | 22 ++++++++++++++++++ .../tracing/BaseNestedSpanContext.java | 7 +++--- .../tracing/IInstrumentConstructor.java | 4 +++- .../tracing/IWithStartTimeAndAttributes.java | 16 ++++--------- .../migrations/tracing/RootOtelContext.java | 23 +++++++++++-------- .../InMemoryInstrumentationBundle.java | 6 +++++ .../trafficCaptureProxyServer/build.gradle | 1 + .../netty/NettyScanningHttpProxyTest.java | 9 +++++--- .../opensearch/migrations/replay/Utils.java | 20 +--------------- .../http/HttpJsonTransformingConsumer.java | 3 +-- .../migrations/replay/UtilsTest.java | 3 ++- .../migrations/replay/TestUtils.java | 3 +-- 12 files changed, 66 insertions(+), 51 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/Utils.java diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/Utils.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/Utils.java new file mode 100644 index 000000000..bb181ff09 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/Utils.java @@ -0,0 +1,22 @@ +package org.opensearch.migrations; + +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Collector; +import java.util.stream.Collectors; + +public class Utils { + /** + * See https://en.wikipedia.org/wiki/Fold_(higher-order_function) + */ + public static Collector + foldLeft(final B seedValue, final BiFunction f) { + return Collectors.collectingAndThen( + Collectors.reducing( + Function.identity(), + a -> b -> f.apply(b, a), + Function::andThen), + finisherArg -> finisherArg.apply(seedValue) + ); + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index a3542963c..cb20584b6 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -8,6 +8,7 @@ import lombok.Setter; import java.time.Instant; +import java.util.stream.Stream; public abstract class BaseNestedSpanContext @@ -40,9 +41,9 @@ protected void initializeSpan(AttributesBuilder attributesBuilder) { initializeSpan(null, attributesBuilder); } - protected void initializeSpan(Span linkedSpan, AttributesBuilder attributesBuilder) { - initializeSpan(rootInstrumentationScope.buildSpan(enclosingScope, getActivityName(), - linkedSpan, attributesBuilder)); + protected void initializeSpan(Stream linkedSpans, AttributesBuilder attributesBuilder) { + initializeSpan(rootInstrumentationScope.buildSpan(this, getActivityName(), + linkedSpans, attributesBuilder)); } public void initializeSpan(@NonNull Span s) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index 571c70465..a0e13d7ac 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -4,7 +4,9 @@ import 
io.opentelemetry.api.trace.Span; import lombok.NonNull; +import java.util.stream.Stream; + public interface IInstrumentConstructor { - @NonNull Span buildSpan(IInstrumentationAttributes enclosingScope, String spanName, Span linkedSpan, + @NonNull Span buildSpan(IInstrumentationAttributes forScope, String spanName, Stream linkedSpans, AttributesBuilder attributesBuilder); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 7234f885b..af5181e8c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -2,6 +2,7 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.LongHistogram; import java.time.Duration; import java.time.Instant; @@ -13,23 +14,16 @@ default void meterHistogramMillis(DoubleHistogram histogram) { meterHistogramMillis(histogram, Duration.between(getStartTime(), Instant.now())); } default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { - meterHistogramMillis(histogram, value, null); - } - default void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { - meterHistogramMillis(histogram, Duration.between(getStartTime(), Instant.now()), - attributesBuilder); - } - default void meterHistogramMillis(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { - meterHistogram(histogram, value.toNanos()/1_000_000.0, attributesBuilder); + meterHistogram(histogram, value.toNanos()/1_000_000.0); } default void meterHistogram(DoubleHistogram histogram, double value) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - histogram.record(value); + histogram.record(value, getPopulatedMetricAttributes()); } } - default void meterHistogram(DoubleHistogram histogram, double value, AttributesBuilder attributesBuilder) { + default void meterHistogram(LongHistogram histogram, long value) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - histogram.record(value); + histogram.record(value, getPopulatedMetricAttributes()); } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 5df850ac1..a4b1f03f7 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -22,10 +22,13 @@ import lombok.Getter; import lombok.NonNull; import lombok.Setter; +import org.opensearch.migrations.Utils; import java.time.Duration; import java.util.Optional; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; @@ -112,24 +115,26 @@ public AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; // nothing more to do } - private static SpanBuilder addLinkedToBuilder(Span linkedSpanContext, SpanBuilder spanBuilder) { - return 
Optional.ofNullable(linkedSpanContext) - .map(Span::getSpanContext).map(spanBuilder::addLink).orElse(spanBuilder); + private static SpanBuilder addLinkedToBuilder(Stream linkedSpanContexts, SpanBuilder spanBuilder) { + return Optional.ofNullable(linkedSpanContexts) + .map(ss->ss.collect(Utils.foldLeft(spanBuilder, (b,s)->b.addLink(s.getSpanContext())))) + .orElse(spanBuilder); } private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan, - Span linkedSpanContext) { - return addLinkedToBuilder(linkedSpanContext, Optional.ofNullable(parentSpan) + Stream linkedSpanContexts) { + return addLinkedToBuilder(linkedSpanContexts, Optional.ofNullable(parentSpan) .map(p -> builder.setParent(Context.current().with(p))) .orElseGet(builder::setNoParent)) .startSpan().setAllAttributes(attrs); } @Override - public @NonNull Span buildSpan(IInstrumentationAttributes enclosingScope, - String spanName, Span linkedSpan, AttributesBuilder attributesBuilder) { - var parentSpan = enclosingScope.getCurrentSpan(); + public @NonNull Span buildSpan(IInstrumentationAttributes forScope, + String spanName, Stream linkedSpans, AttributesBuilder attributesBuilder) { + assert forScope.getCurrentSpan() == null; + var parentSpan = forScope.getEnclosingScope().getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); - return buildSpanWithParent(spanBuilder, getPopulatedSpanAttributes(attributesBuilder), parentSpan, linkedSpan); + return buildSpanWithParent(spanBuilder, forScope.getPopulatedSpanAttributes(), parentSpan, linkedSpans); } } diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java index 99db69e6e..ef68f62e2 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java @@ -17,6 +17,12 @@ public class InMemoryInstrumentationBundle { public final InMemorySpanExporter testSpanExporter; public final InMemoryMetricExporter testMetricExporter; + public InMemoryInstrumentationBundle(boolean collectTraces, + boolean collectMetrics) { + this(collectTraces ? InMemorySpanExporter.create() : null, + collectMetrics ? 
InMemoryMetricExporter.create() : null); + } + public InMemoryInstrumentationBundle(InMemorySpanExporter testSpanExporter, InMemoryMetricExporter testMetricExporter) { this.testSpanExporter = testSpanExporter; diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index 455655fbd..2526c3698 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -45,6 +45,7 @@ dependencies { testImplementation project(':captureProtobufs') testImplementation testFixtures(project(path: ':testUtilities')) testImplementation testFixtures(project(path: ':captureOffloader')) + testImplementation testFixtures(project(path: ':coreUtilities')) testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java index aad99b72c..158ed0251 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java @@ -10,6 +10,7 @@ import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; import org.opensearch.migrations.testutils.SimpleHttpResponse; import org.opensearch.migrations.testutils.SimpleHttpServer; +import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.InMemoryConnectionCaptureFactory; @@ -89,7 +90,9 @@ public void testRoundTrip() throws CountDownLatch interactionsCapturedCountdown = new CountDownLatch(NUM_EXPECTED_TRAFFIC_STREAMS); var captureFactory = new InMemoryConnectionCaptureFactory(TEST_NODE_ID_STRING, 1024*1024, () -> interactionsCapturedCountdown.countDown()); - var servers = startServers(captureFactory); + var inMemoryInstrumentationBundle = new InMemoryInstrumentationBundle(true, true); + var rootCtx = new RootWireLoggingContext(inMemoryInstrumentationBundle.openTelemetrySdk); + var servers = startServers(rootCtx, captureFactory); try (var client = new SimpleHttpClientForTesting()) { var nettyEndpoint = URI.create("http://localhost:" + servers.v1().getProxyPort() + "/"); @@ -172,7 +175,7 @@ private static String makeTestRequestViaClient(SimpleHttpClientForTesting client } private static Tuple - startServers(IConnectionCaptureFactory connectionCaptureFactory) throws + startServers(RootWireLoggingContext rootCtx, IConnectionCaptureFactory connectionCaptureFactory) throws PortFinder.ExceededMaxPortAssigmentAttemptException { var nshp = new AtomicReference(); @@ -200,7 +203,7 @@ private static String makeTestRequestViaClient(SimpleHttpClientForTesting client try { var connectionPool = new BacksideConnectionPool(testServerUri, null, 10, Duration.ofSeconds(10)); - var rootCtx = new RootWireLoggingContext(null); + nshp.get().start(rootCtx, connectionPool, 1, null, connectionCaptureFactory, new RequestCapturePredicate()); System.out.println("proxy port = " + port); diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Utils.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Utils.java index ba383d458..9a4a987f2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Utils.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/Utils.java @@ -14,10 +14,6 @@ import java.util.Base64; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.stream.Collector; -import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @@ -36,20 +32,6 @@ public static long setIfLater(AtomicLong referenceValue, long pointInTimeMillis) return referenceValue.updateAndGet(existing -> Math.max(existing, pointInTimeMillis)); } - /** - * See https://en.wikipedia.org/wiki/Fold_(higher-order_function) - */ - public static Collector - foldLeft(final B seedValue, final BiFunction f) { - return Collectors.collectingAndThen( - Collectors.reducing( - Function.identity(), - a -> b -> f.apply(b, a), - Function::andThen), - finisherArg -> finisherArg.apply(seedValue) - ); - } - @SneakyThrows(value = {IOException.class}) public static String packetsToCompressedTrafficStream(Stream byteArrStream) { var tsb = TrafficStream.newBuilder() @@ -57,7 +39,7 @@ public static String packetsToCompressedTrafficStream(Stream byteArrStre var trafficStreamOfReads = byteArrStream.map(bArr->ReadObservation.newBuilder().setData(ByteString.copyFrom(bArr)).build()) .map(r->TrafficObservation.newBuilder().setRead(r)) - .collect(foldLeft(tsb, (existing,newObs)->tsb.addSubStream(newObs))) + .collect(org.opensearch.migrations.Utils.foldLeft(tsb, (existing, newObs)->tsb.addSubStream(newObs))) .build(); try (var baos = new ByteArrayOutputStream()) { try (var gzStream = new GZIPOutputStream(baos)) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index ca1918e7d..056f60c85 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -4,14 +4,13 @@ import io.netty.channel.embedded.EmbeddedChannel; import io.netty.handler.codec.http.HttpRequestDecoder; import lombok.extern.slf4j.Slf4j; +import org.opensearch.migrations.Utils; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; -import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import 
org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/UtilsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/UtilsTest.java index f46c33efd..03b9b0fd6 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/UtilsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/UtilsTest.java @@ -3,6 +3,7 @@ import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.Utils; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -17,7 +18,7 @@ public void testFoldLeft() { .collect(Collectors.joining()); var foldedValue = - IntStream.range('A','F').mapToObj(c->(char)c+"").collect(Utils.foldLeft("", (a,b) -> a+b)); + IntStream.range('A','F').mapToObj(c->(char)c+"").collect(Utils.foldLeft("", (a, b) -> a+b)); log.info("stream concatenated value: " + foldedValue); Assertions.assertEquals(groundTruth, foldedValue); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index 6541887fe..39ccee061 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -11,11 +11,10 @@ import io.netty.handler.codec.http.HttpRequestDecoder; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; +import org.opensearch.migrations.Utils; import org.opensearch.migrations.replay.datahandlers.IPacketConsumer; import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; From a076bc31eaf00e54c086f790c2a82ad504b4bf5b Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 18 Jan 2024 18:22:56 -0500 Subject: [PATCH 63/94] Add a TestContext for every replayer test via inheritance on the Test class so that I can initialize and teardown the context (and do some sanity checks in the process). I've already found and fixed a couple issues with scoped contexts being doubly closed, which has resulted in UpDownCounters being corrupted. The final check on the TestContext to make sure that all scopes are closed isn't active yet since the results are "862 tests completed, 808 failed". 
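
A rough sketch of the inheritance pattern this commit introduces is below. It is only an
illustration: the names InstrumentationTest, TestContext, makeContext(), and rootContext come from
the diffs that follow, while the JUnit 5 lifecycle hooks and the close()-based teardown are
assumptions about how the per-test setup/teardown and sanity checks could be wired; the real
InstrumentationTest.java created by this patch is the authoritative version.

    // Hypothetical reconstruction of the per-test context base class (not the committed code).
    public abstract class InstrumentationTest {
        protected TestContext rootContext;

        // Subclasses (e.g. FullReplayerWithTracingChecksTest) override this to turn on full tracking.
        protected TestContext makeContext() { return TestContext.noTracking(); }

        @org.junit.jupiter.api.BeforeEach
        void initializeContext() { rootContext = makeContext(); }

        @org.junit.jupiter.api.AfterEach
        void teardownContext() {
            // Assumed teardown hook: closing the context is where per-test sanity checks
            // (such as "were all scopes closed?") would run once that final check is enabled.
            rootContext.close();
            rootContext = null;
        }
    }
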
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 1 - TrafficCapture/coreUtilities/build.gradle | 1 + .../tracing/BaseNestedSpanContext.java | 7 + .../tracing/IInstrumentConstructor.java | 11 ++ .../tracing/IInstrumentationAttributes.java | 3 +- .../migrations/tracing/ContextTracker.java | 61 +++++++ .../replay/AddCompressionEncodingTest.java | 25 +-- .../replay/PayloadRepackingTest.java | 18 +- .../NettyPacketToHttpConsumer.java | 45 +++-- .../http/HttpJsonTransformingConsumer.java | 3 +- .../replay/tracing/IReplayContexts.java | 2 +- .../replay/tracing/ReplayContexts.java | 7 + .../replay/BlockingTrafficSourceTest.java | 12 +- ...afficToHttpTransactionAccumulatorTest.java | 25 +-- ...xpiringTrafficStreamMapSequentialTest.java | 12 +- ...ExpiringTrafficStreamMapUnorderedTest.java | 20 +-- .../FullReplayerWithTracingChecksTest.java | 154 ++++++++++++++++++ .../replay/FullTrafficReplayerTest.java | 138 ++-------------- .../replay/HeaderTransformerTest.java | 12 +- .../KafkaRestartingTrafficReplayerTest.java | 13 +- .../replay/ParsedHttpMessagesAsDictsTest.java | 12 +- .../replay/RequestSenderOrchestratorTest.java | 10 +- .../replay/ResultsToLogsConsumerTest.java | 21 +-- .../SigV4SigningTransformationTest.java | 13 +- ...afficToHttpTransactionAccumulatorTest.java | 11 +- .../replay/TrafficReplayerRunner.java | 12 +- .../replay/TrafficReplayerTest.java | 35 ++-- .../replay/TrafficStreamGenerator.java | 19 ++- .../NettyPacketToHttpConsumerTest.java | 20 +-- .../HttpJsonTransformingConsumerTest.java | 15 +- ...KafkaCommitsWorkBetweenLongPollsTest.java} | 9 +- .../replay/kafka/KafkaKeepAliveTests.java | 26 +-- ...KafkaTrafficCaptureSourceLongTermTest.java | 45 ++--- .../kafka/KafkaTrafficCaptureSourceTest.java | 13 +- .../replay/tracing/TracingTest.java | 11 +- .../tracing/InstrumentationTest.java | 23 +++ .../migrations/tracing/TestContext.java | 30 +++- 37 files changed, 537 insertions(+), 358 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java create mode 100644 TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java rename TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/{KafkaCommitsWorkBetweenLongPolls.java => KafkaCommitsWorkBetweenLongPollsTest.java} (92%) create mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index b703c21be..650599114 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -91,7 +91,6 @@ public StreamManager(IRootKafkaOffloaderContext rootScope, IConnectionContext ct @Override public void close() throws IOException { log.atInfo().setMessage(() -> "factory.close()").log(); - telemetryContext.close(); } @Override diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index bac6ec805..5cbd91ad8 100644 --- a/TrafficCapture/coreUtilities/build.gradle 
+++ b/TrafficCapture/coreUtilities/build.gradle @@ -71,6 +71,7 @@ dependencies { testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk-testing' + testFixturesImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' } configurations.all { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index cb20584b6..d36d20887 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -21,11 +21,18 @@ public abstract class BaseNestedSpanContext Exception observedExceptionToIncludeInMetrics; protected BaseNestedSpanContext(S rootScope, T enclosingScope) { + rootScope.onContextCreated(this); this.enclosingScope = enclosingScope; this.startTime = Instant.now(); this.rootInstrumentationScope = rootScope; } + @Override + public void endSpan() { + IScopedInstrumentationAttributes.super.endSpan(); + rootInstrumentationScope.onContextClosed(this); + } + @Override public IInstrumentationAttributes getEnclosingScope() { return enclosingScope; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index a0e13d7ac..82e193190 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -9,4 +9,15 @@ public interface IInstrumentConstructor { @NonNull Span buildSpan(IInstrumentationAttributes forScope, String spanName, Stream linkedSpans, AttributesBuilder attributesBuilder); + + /** + * For debugging, this will be overridden to track creation and termination of spans + */ + default void onContextCreated(IScopedInstrumentationAttributes newScopedContext) {} + + /** + * For debugging, this will be overridden to track creation and termination of spans + */ + default void onContextClosed(IScopedInstrumentationAttributes newScopedContext) {} + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 9a3e427c4..6c84a130a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -61,7 +61,8 @@ default void meterIncrementEvent(LongCounter c, long increment) { } default void meterDeltaEvent(LongUpDownCounter c, long delta) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - c.add(delta, getPopulatedMetricAttributes()); + var attributes = getPopulatedMetricAttributes(); + c.add(delta, attributes); } } } diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java 
b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java new file mode 100644 index 000000000..7fa87c90e --- /dev/null +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java @@ -0,0 +1,61 @@ +package org.opensearch.migrations.tracing; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import java.lang.ref.WeakReference; +import java.util.HashMap; +import java.util.Map; +import java.util.WeakHashMap; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +@Slf4j +public class ContextTracker { + private static class ExceptionForStackTracingOnly extends Exception {} + @Getter + public static class CallDetails { + private final ExceptionForStackTracingOnly createStackException; + private ExceptionForStackTracingOnly closeStackException; + public CallDetails() { + createStackException = new ExceptionForStackTracingOnly(); + } + } + private final Map scopedContextToCallDetails = + new WeakHashMap<>(); + private final Object lockObject = new Object(); + + public void onCreated(IScopedInstrumentationAttributes ctx) { + synchronized (lockObject) { + var oldItem = scopedContextToCallDetails.putIfAbsent(ctx, new CallDetails()); + assert oldItem == null; + } + } + + public void onClosed(IScopedInstrumentationAttributes ctx) { + synchronized (lockObject) { + var newExceptionStack = new ExceptionForStackTracingOnly(); + var oldCallDetails = scopedContextToCallDetails.get(ctx); + assert oldCallDetails != null; + final var oldE = oldCallDetails.closeStackException; + if (oldE != null) { + log.atError().setCause(newExceptionStack).setMessage(()->"Close is being called here").log(); + log.atError().setCause(oldE).setMessage(()->"... 
but close was already called here").log(); + assert oldE == null; + } + oldCallDetails.closeStackException = new ExceptionForStackTracingOnly(); + } + } + + public Map getAllRemainingActiveScopes() { + synchronized (lockObject) { + return scopedContextToCallDetails.entrySet().stream() + // filter away items that were closed but not cleared yet (since it's a weak map) + .filter(kvp->kvp.getValue().closeStackException == null) + // make a copy since we're in a synchronized block + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + } +} diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java index 89d1b82fb..6ffafa521 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java @@ -6,7 +6,7 @@ import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; -import org.opensearch.migrations.tracing.TestContext; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.transform.JsonJoltTransformBuilder; import org.opensearch.migrations.transform.JsonJoltTransformer; @@ -22,7 +22,7 @@ import java.util.zip.GZIPInputStream; @Slf4j -public class AddCompressionEncodingTest { +public class AddCompressionEncodingTest extends InstrumentationTest { public static final byte BYTE_FILL_VALUE = (byte) '7'; @@ -35,29 +35,30 @@ public void addingCompressionRequestHeaderCompressesPayload() throws ExecutionEx JsonJoltTransformer.newBuilder() .addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP) .build(), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(TestContext.noTracking(), 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); final var payloadPartSize = 511; final var numParts = 1025; String sourceHeaders = "GET / HTTP/1.1\n" + "host: localhost\n" + - "content-length: " + (numParts*payloadPartSize) + "\n"; + "content-length: " + (numParts * payloadPartSize) + "\n"; - DiagnosticTrackableCompletableFuture tail = + DiagnosticTrackableCompletableFuture tail = compressingTransformer.consumeBytes(sourceHeaders.getBytes(StandardCharsets.UTF_8)) - .thenCompose(v-> compressingTransformer.consumeBytes("\n".getBytes(StandardCharsets.UTF_8)), - ()->"AddCompressionEncodingTest.compressingTransformer"); + .thenCompose(v -> compressingTransformer.consumeBytes("\n".getBytes(StandardCharsets.UTF_8)), + () -> "AddCompressionEncodingTest.compressingTransformer"); final byte[] payloadPart = new byte[payloadPartSize]; Arrays.fill(payloadPart, BYTE_FILL_VALUE); - for (var i = new AtomicInteger(numParts); i.get()>0; i.decrementAndGet()) { - tail = tail.thenCompose(v->compressingTransformer.consumeBytes(payloadPart), - ()->"AddCompressionEncodingTest.consumeBytes:"+i.get()); + for (var i = new AtomicInteger(numParts); i.get() > 0; 
i.decrementAndGet()) { + tail = tail.thenCompose(v -> compressingTransformer.consumeBytes(payloadPart), + () -> "AddCompressionEncodingTest.consumeBytes:" + i.get()); } var fullyProcessedResponse = - tail.thenCompose(v->compressingTransformer.finalizeRequest(), - ()->"AddCompressionEncodingTest.fullyProcessedResponse"); + tail.thenCompose(v -> compressingTransformer.finalizeRequest(), + () -> "AddCompressionEncodingTest.fullyProcessedResponse"); fullyProcessedResponse.get(); + try (var bais = new ByteArrayInputStream(testPacketCapture.getBytesCaptured()); var unzipStream = new GZIPInputStream(bais); var isr = new InputStreamReader(unzipStream, StandardCharsets.UTF_8); diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java index 8a06232b6..b6be07217 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/PayloadRepackingTest.java @@ -9,6 +9,7 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.JsonJoltTransformBuilder; import org.opensearch.migrations.transform.JsonJoltTransformer; @@ -23,7 +24,7 @@ @Slf4j @WrapWithNettyLeakDetection(repetitions = 1) -public class PayloadRepackingTest { +public class PayloadRepackingTest extends InstrumentationTest { public static Stream> expandList(Stream> stream, List possibilities) { return stream.flatMap(list-> possibilities.stream().map(innerB -> { @@ -46,8 +47,12 @@ public static Arguments[] makeCombinations() { public void testSimplePayloadTransform(boolean doGzip, boolean doChunked) throws Exception { var transformerBuilder = JsonJoltTransformer.newBuilder(); - if (doGzip) { transformerBuilder.addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP); } - if (doChunked) { transformerBuilder.addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.MAKE_CHUNKED); } + if (doGzip) { + transformerBuilder.addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP); + } + if (doChunked) { + transformerBuilder.addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.MAKE_CHUNKED); + } Random r = new Random(2); var stringParts = IntStream.range(0, 1) @@ -60,7 +65,7 @@ public void testSimplePayloadTransform(boolean doGzip, boolean doChunked) throws expectedRequestHeaders.add("host", "localhost"); expectedRequestHeaders.add("Content-Length", "46"); - TestUtils.runPipelineAndValidate(TestContext.noTracking(), transformerBuilder.build(), null, + TestUtils.runPipelineAndValidate(rootContext, transformerBuilder.build(), null, null, stringParts, expectedRequestHeaders, referenceStringBuilder -> TestUtils.resolveReferenceString(referenceStringBuilder)); } @@ -92,7 +97,8 @@ public void testJsonPayloadTransformation() throws Exception { ObjectMapper mapper = new ObjectMapper(); var simpleTransform = 
mapper.readValue(simplePayloadTransform, - new TypeReference>(){}); + new TypeReference>() { + }); transformerBuilder.addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.PASS_THRU); transformerBuilder.addOperationObject(simpleTransform); @@ -105,7 +111,7 @@ public void testJsonPayloadTransformation() throws Exception { expectedRequestHeaders.add("content-type", "application/json; charset=UTF-8"); expectedRequestHeaders.add("Content-Length", "55"); - TestUtils.runPipelineAndValidate(TestContext.noTracking(), transformerBuilder.build(), null, + TestUtils.runPipelineAndValidate(rootContext, transformerBuilder.build(), null, extraHeaders, List.of(jsonPayload), expectedRequestHeaders, x -> "{\"top\":[{\"Name\":\"A\",\"Value\":1},{\"Name\":\"B\",\"Value\":2}]}"); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index b78b8fa4b..b9351b1b3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -24,9 +24,7 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; import org.opensearch.migrations.replay.datahandlers.http.helpers.WriteMeteringHandler; -import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; @@ -37,6 +35,7 @@ import java.net.URI; import java.time.Instant; +import java.util.NoSuchElementException; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -51,6 +50,9 @@ public class NettyPacketToHttpConsumer implements IPacketFinalizingConsumer initialFuture = new StringTrackableCompletableFuture<>(new CompletableFuture<>(), @@ -95,7 +97,7 @@ public NettyPacketToHttpConsumer(ChannelFuture clientConnection, private & IScopedInstrumentationAttributes> - void setCurrentRequestContext(T requestSendingContext) { + void setCurrentMessageContext(T requestSendingContext) { currentRequestContextUnion = requestSendingContext; } @@ -135,7 +137,7 @@ public static ChannelFuture createClientConnection(EventLoopGroup eventLoopGroup sslEngine.setUseClientMode(true); var sslHandler = new SslHandler(sslEngine); addLoggingHandler(pipeline, "A"); - pipeline.addLast("ssl", sslHandler); + pipeline.addLast(SSL_HANDLER_NAME, sslHandler); sslHandler.handshakeFuture().addListener(handshakeFuture -> { if (handshakeFuture.isSuccess()) { rval.setSuccess(); @@ -174,15 +176,25 @@ private void activateChannelForThisConsumer() { throw new IllegalStateException("Channel " + channel + "is being used elsewhere already!"); } var pipeline = channel.pipeline(); - // add this size counter BEFORE TLS? - pipeline.addFirst(new ReadMeteringingHandler(size->{ + // add these size counters BEFORE TLS? 
Notice that when removing from the pipeline, we need to be more careful + pipeline.addFirst(WRITE_COUNT_WATCHER_HANDLER_NAME, new WriteMeteringHandler(size->{ + // client side, so this is the request + if (size == 0) { return; } if (!(this.currentRequestContextUnion instanceof IReplayContexts.IRequestSendingContext)) { this.getCurrentRequestSpan().close(); - this.setCurrentRequestContext(getParentContext().createHttpReceivingContext()); + this.setCurrentMessageContext(getParentContext().createHttpSendingContext()); + } + getParentContext().onBytesSent(size); + })); + pipeline.addFirst(READ_COUNT_WATCHER_HANDLER_NAME, new ReadMeteringingHandler(size->{ + // client side, so this is the response + if (size == 0) { return; } + if (!(this.currentRequestContextUnion instanceof IReplayContexts.IReceivingHttpResponseContext)) { + this.getCurrentRequestSpan().close(); + this.setCurrentMessageContext(getParentContext().createHttpReceivingContext()); } getParentContext().onBytesReceived(size); })); - pipeline.addFirst(new WriteMeteringHandler(size->getParentContext().onBytesSent(size))); addLoggingHandler(pipeline, "B"); pipeline.addLast(new BacksideSnifferHandler(responseBuilder)); addLoggingHandler(pipeline, "C"); @@ -208,6 +220,13 @@ private void deactivateChannel() { try { var pipeline = channel.pipeline(); log.atDebug().setMessage(() -> "Resetting the pipeline currently at: " + pipeline).log(); + for (var handlerName : new String[]{WRITE_COUNT_WATCHER_HANDLER_NAME, READ_COUNT_WATCHER_HANDLER_NAME}) { + try { + pipeline.remove(handlerName); + } catch (NoSuchElementException e) { + log.atDebug().setMessage(()->"Ignoring an exception that the "+handlerName+" wasn't present").log(); + } + } while (!(pipeline.last() instanceof SslHandler) && (pipeline.last() != null)) { pipeline.removeLast(); } @@ -295,8 +314,10 @@ private IReplayContexts.IReplayerHttpTransactionContext httpContext() { public DiagnosticTrackableCompletableFuture finalizeRequest() { var ff = activeChannelFuture.getDeferredFutureThroughHandle((v,t)-> { - this.getCurrentRequestSpan().close(); - this.setCurrentRequestContext(getParentContext().createWaitingForResponseContext()); + if (!(this.currentRequestContextUnion instanceof IReplayContexts.IReceivingHttpResponseContext)) { + this.getCurrentRequestSpan().close(); + this.setCurrentMessageContext(getParentContext().createWaitingForResponseContext()); + } var future = new CompletableFuture(); var rval = new DiagnosticTrackableCompletableFuture(future, @@ -309,7 +330,7 @@ private IReplayContexts.IReplayerHttpTransactionContext httpContext() { future.complete(responseBuilder.addErrorCause(t).build()); } return rval; - }, ()->"Waiting for previous consumes to set the callback") + }, ()->"Waiting for previous consumes to set the future") .map(f->f.whenComplete((v,t)-> deactivateChannel()), ()->"clearing pipeline"); log.atTrace().setMessage(()->"Chaining finalization work off of " + activeChannelFuture + ". 
Returning finalization future="+ff).log(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java index 056f60c85..16592f790 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumer.java @@ -136,13 +136,13 @@ public DiagnosticTrackableCompletableFuture { - transformationContext.close(); if (t != null) { transformationContext.onTransformFailure(); t = unwindPossibleCompletionException(t); if (t instanceof NoContentException) { return redriveWithoutTransformation(offloadingHandler.packetReceiver, t); } else { + transformationContext.close(); metricsLogger.atError(MetricsEvent.TRANSFORMING_REQUEST_FAILED, t) .setAttribute(MetricsAttributeKey.REQUEST_ID, transformationContext.toString()) .setAttribute(MetricsAttributeKey.CONNECTION_ID, transformationContext.getLogicalEnclosingScope().getConnectionId()) @@ -150,6 +150,7 @@ public DiagnosticTrackableCompletableFuture!possibilitiesLeftToTest.isEmpty()) .filter(c->TrafficStreamGenerator.classifyTrafficStream(possibilitiesLeftToTest, c.trafficStreams) > 0) .flatMap(c-> { @@ -84,7 +79,6 @@ public void testAccumulatedSplit(String testName, int cutPoint, void accumulateWithAccumulatorPairAtPoint(TrafficStream[] trafficStreams, int cutPoint, int[] expectedRequestSizes, int[] expectedResponseSizes) { - var ctx = TestContext.noTracking(); List reconstructedTransactions = new ArrayList<>(); AtomicInteger requestsReceived = new AtomicInteger(0); // some of the messages up to the cutPoint may not have been able to be fully committed (when the @@ -93,13 +87,13 @@ void accumulateWithAccumulatorPairAtPoint(TrafficStream[] trafficStreams, int cu // in the first pass. // // Notice that this may cause duplicates. That's by design. The system has an at-least-once guarantee. - var indicesProcessedPass1 = - SimpleCapturedTrafficToHttpTransactionAccumulatorTest.accumulateTrafficStreamsWithNewAccumulator(ctx, + var indicesProcessedPass1 = SimpleCapturedTrafficToHttpTransactionAccumulatorTest + .accumulateTrafficStreamsWithNewAccumulator(rootContext, Arrays.stream(trafficStreams).limit(cutPoint), reconstructedTransactions, requestsReceived); cutPoint = indicesProcessedPass1.isEmpty() ? 0 : indicesProcessedPass1.last(); - var indicesProcessedPass2 = - SimpleCapturedTrafficToHttpTransactionAccumulatorTest.accumulateTrafficStreamsWithNewAccumulator(ctx, - Arrays.stream(trafficStreams).skip(cutPoint), reconstructedTransactions, requestsReceived); + var indicesProcessedPass2 = SimpleCapturedTrafficToHttpTransactionAccumulatorTest + .accumulateTrafficStreamsWithNewAccumulator(rootContext, + Arrays.stream(trafficStreams).skip(cutPoint), reconstructedTransactions, requestsReceived); // three checks to do w/ the indicesProcessed sets. 
// Count their sum, confirm that there were not duplicates, confirm all match the input indices @@ -114,5 +108,4 @@ void accumulateWithAccumulatorPairAtPoint(TrafficStream[] trafficStreams, int cu SimpleCapturedTrafficToHttpTransactionAccumulatorTest.assertReconstructedTransactionsMatchExpectations( reconstructedTransactions, expectedRequestSizes, expectedResponseSizes); } - } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java index 0c31bcd1c..b6000245d 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java @@ -5,6 +5,7 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; @@ -15,13 +16,12 @@ import java.util.function.Function; import java.util.stream.Collectors; -class ExpiringTrafficStreamMapSequentialTest { +class ExpiringTrafficStreamMapSequentialTest extends InstrumentationTest { public static final String TEST_NODE_ID_STRING = "test_node_id"; - public static void testLinearExpirations(Function connectionGenerator, int window, int granularity, + public void testLinearExpirations(Function connectionGenerator, int window, int granularity, int expectedExpirationCounts[]) { - var context = TestContext.noTracking(); var expiredAccumulations = new ArrayList(); var expiringMap = new ExpiringTrafficStreamMap(Duration.ofSeconds(window), Duration.ofSeconds(granularity), new BehavioralPolicy() { @@ -36,15 +36,15 @@ public void onExpireAccumulation(String partitionId, for (int i=0; inew Accumulation(tsk, 0)); createdAccumulations.add(accumulation); expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, - connectionGenerator.apply(i), 0, context::createTrafficStreamContextForTest), + connectionGenerator.apply(i), 0, rootContext::createTrafficStreamContextForTest), accumulation, ts); var rrPair = createdAccumulations.get(i).getOrCreateTransactionPair( PojoTrafficStreamKeyAndContext.build("n","c",1, - context::createTrafficStreamContextForTest), Instant.EPOCH); + rootContext::createTrafficStreamContextForTest), Instant.EPOCH); rrPair.addResponseData(ts, ("Add"+i).getBytes(StandardCharsets.UTF_8)); expiredCountsPerLoop.add(expiredAccumulations.size()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java index 9a6cf584f..f5b2f2142 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java @@ -6,6 +6,7 @@ import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import 
org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; @@ -17,14 +18,13 @@ import java.util.stream.Collectors; @WrapWithNettyLeakDetection(disableLeakChecks = true) -class ExpiringTrafficStreamMapUnorderedTest { +class ExpiringTrafficStreamMapUnorderedTest extends InstrumentationTest { public static final String TEST_NODE_ID_STRING = "test_node_id"; public void testExpirations(Function connectionGenerator, int window, int granularity, int timestamps[], int expectedExpirationCounts[]) { - var context = TestContext.noTracking(); var expiredAccumulations = new ArrayList(); var expiringMap = new ExpiringTrafficStreamMap(Duration.ofSeconds(window), Duration.ofSeconds(granularity), new BehavioralPolicy() { @@ -36,26 +36,26 @@ public void onExpireAccumulation(String partitionId, }); var createdAccumulations = new ArrayList(); var expiredCountsPerLoop = new ArrayList(); - for (int i=0; inew Accumulation(tsk, 0)); + rootContext::createTrafficStreamContextForTest); + var accumulation = expiringMap.getOrCreateWithoutExpiration(tsk, k -> new Accumulation(tsk, 0)); expiringMap.expireOldEntries(PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID_STRING, connectionGenerator.apply(i), 0, - context::createTrafficStreamContextForTest), + rootContext::createTrafficStreamContextForTest), accumulation, ts); createdAccumulations.add(accumulation); if (accumulation != null) { - var rrPair = accumulation.getOrCreateTransactionPair(PojoTrafficStreamKeyAndContext.build("n","c",1, - context::createTrafficStreamContextForTest), Instant.EPOCH); + var rrPair = accumulation.getOrCreateTransactionPair(PojoTrafficStreamKeyAndContext.build("n", "c", 1, + rootContext::createTrafficStreamContextForTest), Instant.EPOCH); rrPair.addResponseData(ts, ("Add" + i).getBytes(StandardCharsets.UTF_8)); } expiredCountsPerLoop.add(expiredAccumulations.size()); } Assertions.assertEquals( - Arrays.stream(expectedExpirationCounts).mapToObj(i->""+i).collect(Collectors.joining()), - expiredCountsPerLoop.stream().map(i->""+i).collect(Collectors.joining())); + Arrays.stream(expectedExpirationCounts).mapToObj(i -> "" + i).collect(Collectors.joining()), + expiredCountsPerLoop.stream().map(i -> "" + i).collect(Collectors.joining())); } @Test diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java new file mode 100644 index 000000000..9676a1aad --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -0,0 +1,154 @@ +package org.opensearch.migrations.replay; + +import com.google.protobuf.ByteString; +import com.google.protobuf.Timestamp; +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import io.opentelemetry.sdk.trace.data.SpanData; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; +import org.opensearch.migrations.testutils.SimpleNettyHttpServer; +import 
org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.TestContext; +import org.opensearch.migrations.trafficcapture.protos.CloseObservation; +import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; +import org.opensearch.migrations.trafficcapture.protos.ReadObservation; +import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; +import org.opensearch.migrations.trafficcapture.protos.TrafficStream; +import org.opensearch.migrations.trafficcapture.protos.WriteObservation; +import org.opensearch.migrations.transform.StaticAuthTransformerFactory; + +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.Instant; +import java.util.HashSet; +import java.util.List; +import java.util.Random; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +@Slf4j +@WrapWithNettyLeakDetection(disableLeakChecks = true) +public class FullReplayerWithTracingChecksTest extends FullTrafficReplayerTest { + + protected TestContext makeContext() { return TestContext.withAllTracking(); } + + @Test + public void testSingleStreamWithCloseIsCommitted() throws Throwable { + var random = new Random(1); + var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), + response -> TestHttpServerContext.makeResponse(random, response)); + var trafficStreamWithJustClose = TrafficStream.newBuilder() + .setNodeId(TEST_NODE_ID) + .setConnectionId(TEST_CONNECTION_ID) + .addSubStream(TrafficObservation.newBuilder() + .setClose(CloseObservation.newBuilder().build()).build()) + .build(); + var trafficSourceSupplier = new FullTrafficReplayerTest.ArrayCursorTrafficSourceFactory(rootContext, + List.of(trafficStreamWithJustClose)); + TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(rootContext, 0, + httpServer.localhostEndpoint(), new FullTrafficReplayerTest.IndexWatchingListenerFactory(), trafficSourceSupplier); + Assertions.assertEquals(1, trafficSourceSupplier.nextReadCursor.get()); + log.info("done"); + } + + @ParameterizedTest + @ValueSource(ints = {1,2}) + public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) throws Throwable { + var random = new Random(1); + var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), + response->TestHttpServerContext.makeResponse(random, response)); + var baseTime = Instant.now(); + var fixedTimestamp = + Timestamp.newBuilder().setSeconds(baseTime.getEpochSecond()).setNanos(baseTime.getNano()).build(); + var tsb = TrafficStream.newBuilder().setConnectionId("C"); + for (int i=0; i(); + try (var blockingTrafficSource = new BlockingTrafficSource(trafficSource, Duration.ofMinutes(2))) { + tr.setupRunAndWaitForReplayWithShutdownChecks(Duration.ofSeconds(70), blockingTrafficSource, + new TimeShifter(10 * 1000), (t) -> { + var key = t.uniqueRequestKey; + var wasNew = tuplesReceived.add(key.toString()); + Assertions.assertTrue(wasNew); + }); + } finally { + tr.shutdown(null); + } + + Assertions.assertEquals(numRequests, tuplesReceived.size()); + checkSpansForSimpleReplayedTransactions(rootContext.inMemoryInstrumentationBundle.testSpanExporter, + numRequests); + log.info("done"); + } + + /** + * This function is written like this rather than with a loop so that the backtrace will show WHICH + * key was corrupted. 
+ */ + private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSpanExporter, int numRequests) { + var byName = testSpanExporter.getFinishedSpanItems().stream().collect(Collectors.groupingBy(SpanData::getName)); + BiConsumer chk = (i, k) -> { + Assertions.assertNotNull(byName.get(k)); + Assertions.assertEquals(i, byName.get(k).size()); + byName.remove(k); + }; + chk.accept(1,"channel"); + chk.accept(1, "trafficStreamLifetime"); + chk.accept(numRequests, "httpTransaction"); + chk.accept(numRequests, "accumulatingRequest"); + chk.accept(numRequests, "accumulatingResponse"); + chk.accept(numRequests, "transformation"); + chk.accept(numRequests, "targetTransaction"); + chk.accept(numRequests*2, "scheduled"); + chk.accept(numRequests, "requestSending"); + chk.accept(numRequests, "tupleHandling"); + + Consumer chkNonZero = k-> { + Assertions.assertNotNull(byName.get(k)); + Assertions.assertFalse(byName.get(k).isEmpty()); + byName.remove(k); + }; + chkNonZero.accept("waitingForResponse"); + chkNonZero.accept("readNextTrafficStreamChunk"); + // ideally, we'd be getting these back too, but our requests are malformed, so the server closes, which + // may occur before we've started to accumulate the response. So - just ignore these, but make sure that + // there isn't anything else that we've missed. + byName.remove("receivingResponse"); + + Assertions.assertEquals("", byName.entrySet().stream() + .map(kvp->kvp.getKey()+":"+kvp.getValue()).collect(Collectors.joining())); + } +} diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index 75ed95f8d..7b55030ce 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -10,7 +10,6 @@ import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.ValueSource; @@ -24,6 +23,7 @@ import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; @@ -57,13 +57,13 @@ // to the test server, a shutdown will stop those work threads without letting them flush through all of their work // (since that could take a very long time) and some of the work might have been followed by resource releases. 
@WrapWithNettyLeakDetection(disableLeakChecks = true) -public class FullTrafficReplayerTest { +public class FullTrafficReplayerTest extends InstrumentationTest { public static final int INITIAL_STOP_REPLAYER_REQUEST_COUNT = 1; public static final String TEST_NODE_ID = "TestNodeId"; public static final String TEST_CONNECTION_ID = "testConnectionId"; - private static class IndexWatchingListenerFactory implements Supplier> { + protected static class IndexWatchingListenerFactory implements Supplier> { AtomicInteger nextStopPointRef = new AtomicInteger(INITIAL_STOP_REPLAYER_REQUEST_COUNT); @Override @@ -82,122 +82,6 @@ public Consumer get() { } } - @Test - public void testSingleStreamWithCloseIsCommitted() throws Throwable { - var random = new Random(1); - var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), - response->TestHttpServerContext.makeResponse(random, response)); - var trafficStreamWithJustClose = TrafficStream.newBuilder() - .setNodeId(TEST_NODE_ID) - .setConnectionId(TEST_CONNECTION_ID) - .addSubStream(TrafficObservation.newBuilder() - .setClose(CloseObservation.newBuilder().build()).build()) - .build(); - var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(TestContext.noTracking(), - List.of(trafficStreamWithJustClose)); - TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(0, - httpServer.localhostEndpoint(), new IndexWatchingListenerFactory(), trafficSourceSupplier); - Assertions.assertEquals(1, trafficSourceSupplier.nextReadCursor.get()); - log.info("done"); - } - - @ParameterizedTest - @ValueSource(ints = {1,2}) - public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) throws Throwable { - var random = new Random(1); - var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), - response->TestHttpServerContext.makeResponse(random, response)); - var baseTime = Instant.now(); - var fixedTimestamp = - Timestamp.newBuilder().setSeconds(baseTime.getEpochSecond()).setNanos(baseTime.getNano()).build(); - var tsb = TrafficStream.newBuilder().setConnectionId("C"); - for (int i=0; i(); - try (var blockingTrafficSource = new BlockingTrafficSource(trafficSource, Duration.ofMinutes(2))) { - tr.setupRunAndWaitForReplayWithShutdownChecks(Duration.ofSeconds(70), blockingTrafficSource, - new TimeShifter(10 * 1000), (t) -> { - var key = t.uniqueRequestKey; - var wasNew = tuplesReceived.add(key.toString()); - Assertions.assertTrue(wasNew); - }); - } finally { - tr.shutdown(null); - } - - Assertions.assertEquals(numRequests, tuplesReceived.size()); - checkSpansForSimpleReplayedTransactions(trackingContext.inMemoryInstrumentationBundle.testSpanExporter, - numRequests); - log.info("done"); - } - - /** - * This function is written like this rather than with a loop so that the backtrace will show WHICH - * key was corrupted. 
- */ - private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSpanExporter, int numRequests) { - var byName = testSpanExporter.getFinishedSpanItems().stream().collect(Collectors.groupingBy(SpanData::getName)); - BiConsumer chk = (i, k) -> { - Assertions.assertNotNull(byName.get(k)); - Assertions.assertEquals(i, byName.get(k).size()); - byName.remove(k); - }; - chk.accept(1,"channel"); - chk.accept(1, "trafficStreamLifetime"); - chk.accept(numRequests, "httpTransaction"); - chk.accept(numRequests, "accumulatingRequest"); - chk.accept(numRequests, "accumulatingResponse"); - chk.accept(numRequests, "transformation"); - chk.accept(numRequests, "targetTransaction"); - chk.accept(numRequests*2, "scheduled"); - chk.accept(numRequests, "requestSending"); - chk.accept(numRequests, "waitingForResponse"); - chk.accept(numRequests, "tupleHandling"); - - Consumer chkNonZero = k-> { - Assertions.assertNotNull(byName.get(k)); - Assertions.assertFalse(byName.get(k).isEmpty()); - byName.remove(k); - }; - chkNonZero.accept("readNextTrafficStreamChunk"); - // ideally, we'd be getting these back too, but our requests are malformed, so the server closes, which - // may occur before we've started to accumulate the response. So - just ignore these, but make sure that - // there isn't anything else that we've missed. - byName.remove("receivingResponse"); - - Assertions.assertEquals("", byName.entrySet().stream() - .map(kvp->kvp.getKey()+":"+kvp.getValue()).collect(Collectors.joining())); - } - @ParameterizedTest @CsvSource(value = { "3,false", @@ -209,15 +93,15 @@ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSp public void fullTest(int testSize, boolean randomize) throws Throwable { var random = new Random(1); var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(200), - response->TestHttpServerContext.makeResponse(random,response)); - var streamAndConsumer = TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(testSize, randomize); + response -> TestHttpServerContext.makeResponse(random, response)); + var streamAndConsumer = + TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(rootContext, testSize, randomize); var numExpectedRequests = streamAndConsumer.numHttpTransactions; var trafficStreams = streamAndConsumer.stream.collect(Collectors.toList()); - log.atInfo().setMessage(()->trafficStreams.stream().map(ts->TrafficStreamUtils.summarizeTrafficStream(ts)) - .collect(Collectors.joining("\n"))).log(); - var rootContext = TestContext.noTracking(); + log.atInfo().setMessage(() -> trafficStreams.stream().map(ts -> TrafficStreamUtils.summarizeTrafficStream(ts)) + .collect(Collectors.joining("\n"))).log(); var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(rootContext, trafficStreams); - TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(numExpectedRequests, + TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(rootContext, numExpectedRequests, httpServer.localhostEndpoint(), new IndexWatchingListenerFactory(), trafficSourceSupplier); Assertions.assertEquals(trafficSourceSupplier.trafficStreamsList.size(), trafficSourceSupplier.nextReadCursor.get()); log.info("done"); @@ -251,7 +135,7 @@ public int compareTo(TrafficStreamCursorKey other) { } } - private static class ArrayCursorTrafficSourceFactory implements Supplier { + protected static class ArrayCursorTrafficSourceFactory implements Supplier { private final TestContext rootContext; List trafficStreamsList; AtomicInteger nextReadCursor = new 
AtomicInteger(); @@ -268,7 +152,7 @@ public ISimpleTrafficCaptureSource get() { } } - private static class ArrayCursorTrafficCaptureSource implements ISimpleTrafficCaptureSource { + protected static class ArrayCursorTrafficCaptureSource implements ISimpleTrafficCaptureSource { final AtomicInteger readCursor; final PriorityQueue pQueue = new PriorityQueue<>(); Integer cursorHighWatermark; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java index f78130cc1..a6955fdf8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java @@ -7,6 +7,7 @@ import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.StaticAuthTransformerFactory; @@ -22,21 +23,20 @@ @Slf4j @WrapWithNettyLeakDetection -public class HeaderTransformerTest { +public class HeaderTransformerTest extends InstrumentationTest { private static final String SILLY_TARGET_CLUSTER_NAME = "remoteguest"; private static final String SOURCE_CLUSTER_NAME = "localhost"; @Test public void testTransformer() throws Exception { - var context = TestContext.noTracking(); // mock object. values don't matter at all - not what we're testing final var dummyAggregatedResponse = new TransformedTargetRequestAndResponse(null, 17, null, null, HttpRequestTransformationStatus.COMPLETED, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformer = new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME); var transformingHandler = new HttpJsonTransformingConsumer(transformer, null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(context, 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + "HoSt: " + SOURCE_CLUSTER_NAME + "\r\n" + @@ -80,7 +80,6 @@ private void runRandomPayloadWithTransformer(HttpJsonTransformingConsumer "GET / HTTP/1.1\r\n" + @@ -106,7 +105,6 @@ public void testMalformedPayloadIsPassedThrough() throws Exception { */ @Test public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exception { - var ctx = TestContext.noTracking(); var referenceStringBuilder = new StringBuilder(); // mock object. 
values don't matter at all - not what we're testing final var dummyAggregatedResponse = new TransformedTargetRequestAndResponse(null, 12, null, @@ -116,7 +114,7 @@ public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exce var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME, null, "[{\"JsonTransformerForOpenSearch23PlusTargetTransformerProvider\":\"\"}]"), - null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(ctx, 0)); + null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); Random r = new Random(2); var stringParts = IntStream.range(0, 1).mapToObj(i-> TestUtils.makeRandomString(r, 10)).map(o->(String)o) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index 521a58bd0..d85044fea 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -16,7 +16,7 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; @@ -39,7 +39,7 @@ @Slf4j @Testcontainers(disabledWithoutDocker = true) @Tag("requiresDocker") -public class KafkaRestartingTrafficReplayerTest { +public class KafkaRestartingTrafficReplayerTest extends InstrumentationTest { public static final int INITIAL_STOP_REPLAYER_REQUEST_COUNT = 1; public static final String TEST_GROUP_CONSUMER_ID = "TEST_GROUP_CONSUMER_ID"; public static final String TEST_GROUP_PRODUCER_ID = "TEST_GROUP_PRODUCER_ID"; @@ -88,17 +88,18 @@ public void fullTest(int testSize, boolean randomize) throws Throwable { var random = new Random(1); var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), response->TestHttpServerContext.makeResponse(random, response)); - var streamAndConsumer = TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(testSize, randomize); + var streamAndConsumer = + TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(rootContext, testSize, randomize); var trafficStreams = streamAndConsumer.stream.collect(Collectors.toList()); log.atInfo().setMessage(()->trafficStreams.stream().map(TrafficStreamUtils::summarizeTrafficStream) .collect(Collectors.joining("\n"))).log(); loadStreamsToKafka(buildKafkaConsumer(), Streams.concat(trafficStreams.stream(), Stream.of(SENTINEL_TRAFFIC_STREAM))); - TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(streamAndConsumer.numHttpTransactions, + TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(rootContext, streamAndConsumer.numHttpTransactions, httpServer.localhostEndpoint(), new CounterLimitedReceiverFactory(), () -> new SentinelSensingTrafficSource( - new KafkaTrafficCaptureSource(TestContext.noTracking(), 
buildKafkaConsumer(), TEST_TOPIC_NAME, + new KafkaTrafficCaptureSource(rootContext, buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)))); log.info("done"); } @@ -181,7 +182,7 @@ Producer buildKafkaProducer() { throw Lombok.sneakyThrow(e); } }); - return () -> new KafkaTrafficCaptureSource(TestContext.noTracking(), kafkaConsumer, TEST_TOPIC_NAME, + return () -> new KafkaTrafficCaptureSource(rootCtx, kafkaConsumer, TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index 9290be8e6..89688a605 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -5,18 +5,13 @@ import org.opensearch.migrations.replay.datatypes.MockMetricsBuilder; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import java.util.Map; import java.util.Optional; -class ParsedHttpMessagesAsDictsTest { - - static TestContext rootContext = TestContext.noTracking(); - - private static final PojoTrafficStreamKeyAndContext TEST_TRAFFIC_STREAM_KEY = - PojoTrafficStreamKeyAndContext.build("N","C",1, - k->rootContext.createTrafficStreamContextForStreamSource(rootContext.createChannelContext(k), k)); +class ParsedHttpMessagesAsDictsTest extends InstrumentationTest { ParsedHttpMessagesAsDicts makeTestData() { return makeTestData(null, null); @@ -30,6 +25,9 @@ ParsedHttpMessagesAsDicts makeTestData(Map sourceResponse, MaprootContext.createTrafficStreamContextForStreamSource(rootContext.createChannelContext(k), k)); metricsBuilder = (MockMetricsBuilder) parsedMessage.buildStatusCodeMetrics(metricsBuilder, new PojoUniqueSourceRequestKey(TEST_TRAFFIC_STREAM_KEY, 0)); return metricsBuilder.getLoggedAttributes(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index 2040f278d..5b223d522 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -11,6 +11,7 @@ import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; @@ -23,7 +24,7 @@ @Slf4j @WrapWithNettyLeakDetection(repetitions = 1) -class RequestSenderOrchestratorTest { +class RequestSenderOrchestratorTest extends InstrumentationTest { public static final int NUM_REQUESTS_TO_SCHEDULE = 20; public static final int NUM_REPEATS = 2; @@ -31,7 +32,6 @@ class RequestSenderOrchestratorTest { @Test @Tag("longTest") 
public void testThatSchedulingWorks() throws Exception { - var ctx = TestContext.noTracking(); var httpServer = SimpleHttpServer.makeServer(false, r -> TestHttpServerContext.makeResponse(r, Duration.ofMillis(100))); var testServerUri = httpServer.localhostEndpoint(); @@ -41,7 +41,7 @@ public void testThatSchedulingWorks() throws Exception { Instant lastEndTime = baseTime; var scheduledItems = new ArrayList>(); for (int i = 0; i 0); var httpMessage = HttpByteBufFormatter.parseHttpMessageFromBufs(HttpByteBufFormatter.HttpMessageType.RESPONSE, arr.responsePackets.stream().map(kvp->Unpooled.wrappedBuffer(kvp.getValue())), false); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 80d29ef02..1f621656a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -20,6 +20,7 @@ import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import java.io.IOException; @@ -33,7 +34,7 @@ @Slf4j @WrapWithNettyLeakDetection(repetitions = 4) -class ResultsToLogsConsumerTest { +class ResultsToLogsConsumerTest extends InstrumentationTest { private static final String NODE_ID = "n"; private static final ObjectMapper mapper = new ObjectMapper(); public static final String TEST_EXCEPTION_MESSAGE = "TEST_EXCEPTION"; @@ -77,10 +78,9 @@ public void testTupleNewWithNullKeyThrows() { @Test public void testOutputterWithNulls() throws IOException { - var context = TestContext.noTracking(); var emptyTuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - context::createTrafficStreamContextForTest), 0, 0), + new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID, "c", 0, + rootContext::createTrafficStreamContextForTest), 0, 0), null, null, null, null, null, null); try (var closeableLogSetup = new CloseableLogSetup()) { var consumer = new TupleParserChainConsumer(null, new ResultsToLogsConsumer()); @@ -94,11 +94,10 @@ public void testOutputterWithNulls() throws IOException { @Test public void testOutputterWithException() throws IOException { - var context = TestContext.noTracking(); var exception = new Exception(TEST_EXCEPTION_MESSAGE); var emptyTuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - context::createTrafficStreamContextForTest), 0, 0), + new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID, "c", 0, + rootContext::createTrafficStreamContextForTest), 0, 0), null, null, null, null, exception, null); try (var closeableLogSetup = new CloseableLogSetup()) { @@ -231,11 +230,9 @@ public void testOutputterForPost() throws IOException { testOutputterForRequest("post_formUrlEncoded_withFixedLength.txt", EXPECTED_LOGGED_OUTPUT); } - @Test - private void testOutputterForRequest(String requestResourceName, String expected) throws IOException { - var context = TestContext.noTracking(); - var trafficStreamKey = 
PojoTrafficStreamKeyAndContext.build(NODE_ID,"c",0, - context::createTrafficStreamContextForTest); + public void testOutputterForRequest(String requestResourceName, String expected) throws IOException { + var trafficStreamKey = PojoTrafficStreamKeyAndContext.build(NODE_ID, "c", 0, + rootContext::createTrafficStreamContextForTest); var sourcePair = new RequestResponsePacketPair(trafficStreamKey, Instant.EPOCH, 0, 0); var rawRequestData = loadResourceAsBytes("/requests/raw/" + requestResourceName); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java index 85cf9c80f..82f43e31b 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java @@ -1,17 +1,15 @@ package org.opensearch.migrations.replay; -import io.netty.buffer.Unpooled; -import io.netty.handler.codec.base64.Base64; import io.netty.handler.codec.http.DefaultHttpHeaders; import io.netty.util.ResourceLeakDetector; import org.junit.jupiter.api.Test; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; -import java.nio.charset.StandardCharsets; import java.time.Clock; import java.time.Instant; import java.time.ZoneOffset; @@ -21,7 +19,7 @@ @WrapWithNettyLeakDetection -public class SigV4SigningTransformationTest { +public class SigV4SigningTransformationTest extends InstrumentationTest { private static class MockCredentialsProvider implements AwsCredentialsProvider { @Override @@ -53,13 +51,14 @@ public void testSignatureProperlyApplied() throws Exception { "SignedHeaders=host;x-amz-content-sha256;x-amz-date, " + "Signature=4cb1c423e6fe61216fbaa11398260af7f8daa85e74cd41428711e4df5cd70c97"); expectedRequestHeaders.add("x-amz-content-sha256", - "fc0e8e9a1f7697f510bfdd4d55b8612df8a0140b4210967efd87ee9cb7104362"); + "fc0e8e9a1f7697f510bfdd4d55b8612df8a0140b4210967efd87ee9cb7104362"); expectedRequestHeaders.add("X-Amz-Date", "19700101T000000Z"); - TestUtils.runPipelineAndValidate(TestContext.noTracking(), + TestUtils.runPipelineAndValidate(rootContext, msg -> new SigV4Signer(mockCredentialsProvider, "es", "us-east-1", "https", - () -> Clock.fixed(Instant.EPOCH, ZoneOffset.UTC)), + () -> Clock.fixed(Instant.EPOCH, ZoneOffset.UTC)), null, stringParts, expectedRequestHeaders, referenceStringBuilder -> TestUtils.resolveReferenceString(referenceStringBuilder)); + } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 542bb513e..46d935059 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -17,6 +17,7 @@ import org.opensearch.migrations.replay.datatypes.RawPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; @@ -52,7 +53,7 @@ * @return */ @Slf4j -public class SimpleCapturedTrafficToHttpTransactionAccumulatorTest { +public class SimpleCapturedTrafficToHttpTransactionAccumulatorTest extends InstrumentationTest { public static final int MAX_COMMANDS_IN_CONNECTION = 256; @@ -123,7 +124,7 @@ static TrafficStream[] makeTrafficStreams(int bufferSize, int interactionOffset, var connectionFactory = buildSerializerFactory(bufferSize, ()->{}); var tsk = PojoTrafficStreamKeyAndContext.build("n", "test_"+uniqueIdCounter.incrementAndGet(), 0, rootContext::createTrafficStreamContextForTest); - var offloader = connectionFactory.createOffloader(TestContext.noTracking().createChannelContext(tsk)); + var offloader = connectionFactory.createOffloader(rootContext.createChannelContext(tsk)); for (var directive : directives) { serializeEvent(offloader, interactionOffset++, directive); } @@ -195,12 +196,12 @@ public static Tuple2 unzipRequestResponseSizes(List collat @MethodSource("loadSimpleCombinations") void generateAndTest(String testName, int bufferSize, int skipCount, List directives, List expectedSizes) throws Exception { - var context = TestContext.noTracking(); var trafficStreams = Arrays.stream(makeTrafficStreams(bufferSize, 0, new AtomicInteger(), - directives, TestContext.noTracking())).skip(skipCount); + directives, rootContext)).skip(skipCount); List reconstructedTransactions = new ArrayList<>(); AtomicInteger requestsReceived = new AtomicInteger(0); - accumulateTrafficStreamsWithNewAccumulator(context, trafficStreams, reconstructedTransactions, requestsReceived); + accumulateTrafficStreamsWithNewAccumulator(rootContext, trafficStreams, reconstructedTransactions, + requestsReceived); var splitSizes = unzipRequestResponseSizes(expectedSizes); assertReconstructedTransactionsMatchExpectations(reconstructedTransactions, splitSizes._1, splitSizes._2); Assertions.assertEquals(requestsReceived.get(), reconstructedTransactions.size()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index 60b7fdf6c..3add84204 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -5,10 +5,8 @@ import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; -import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import 
org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.StaticAuthTransformerFactory; import org.slf4j.event.Level; @@ -19,7 +17,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Optional; -import java.util.Random; import java.util.StringJoiner; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; @@ -38,7 +35,7 @@ static class FabricatedErrorToKillTheReplayer extends Error { private TrafficReplayerRunner() {} - static void runReplayerUntilSourceWasExhausted(int numExpectedRequests, URI endpoint, + static void runReplayerUntilSourceWasExhausted(TestContext rootContext, int numExpectedRequests, URI endpoint, Supplier> tupleListenerSupplier, Supplier trafficSourceSupplier) throws Throwable { @@ -54,7 +51,7 @@ static void runReplayerUntilSourceWasExhausted(int numExpectedRequests, URI endp var counter = new AtomicInteger(); var tupleReceiver = tupleListenerSupplier.get(); try { - runTrafficReplayer(trafficSourceSupplier, endpoint, (t) -> { + runTrafficReplayer(rootContext, trafficSourceSupplier, endpoint, (t) -> { if (runNumber != runNumberRef.get()) { // for an old replayer. I'm not sure why shutdown isn't blocking until all threads are dead, // but that behavior only impacts this test as far as I can tell. @@ -137,11 +134,12 @@ static void runReplayerUntilSourceWasExhausted(int numExpectedRequests, URI endp Assertions.assertEquals(numExpectedRequests, totalUniqueEverReceived.get()); } - private static void runTrafficReplayer(Supplier captureSourceSupplier, + private static void runTrafficReplayer(TestContext rootContext, + Supplier captureSourceSupplier, URI endpoint, Consumer tupleReceiver) throws Exception { log.info("Starting a new replayer and running it"); - var tr = new TrafficReplayer(TestContext.noTracking(), endpoint, null, + var tr = new TrafficReplayer(rootContext, endpoint, null, new StaticAuthTransformerFactory("TEST"), null, true, 10, 10*1024); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 5d144e0f4..a543c80b0 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -12,6 +12,7 @@ import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.ConnectionExceptionObservation; @@ -39,7 +40,7 @@ @Slf4j @WrapWithNettyLeakDetection -class TrafficReplayerTest { +class TrafficReplayerTest extends InstrumentationTest { public static final String TEST_NODE_ID_STRING = "test_node_id"; private static String TEST_TRAFFIC_STREAM_ID_STRING = "testId"; @@ -112,7 +113,6 @@ private static Timestamp getProtobufTimestamp(Instant t) { public void testDelimitedDeserializer() throws Exception { final Instant timestamp = Instant.now(); byte[] serializedChunks = synthesizeTrafficStreamsIntoByteArray(timestamp, 3); - var rootContext = TestContext.noTracking(); try (var 
bais = new ByteArrayInputStream(serializedChunks)) { AtomicInteger counter = new AtomicInteger(0); var allMatch = new AtomicBoolean(true); @@ -152,7 +152,7 @@ static byte[] synthesizeTrafficStreamsIntoByteArray(Instant timestamp, int numSt @Test public void testReader() throws Exception { - var tr = new TrafficReplayer(TestContext.noTracking(), + var tr = new TrafficReplayer(rootContext, new URI("http://localhost:9200"), null, null, false); List> byteArrays = new ArrayList<>(); CapturedTrafficToHttpTransactionAccumulator trafficAccumulator = @@ -178,7 +178,8 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, IReplayContexts.IChannelKeyContext ctx, - @NonNull List trafficStreamKeysBeingHeld) {} + @NonNull List trafficStreamKeysBeingHeld) { + } @Override public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, @@ -188,23 +189,25 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel @NonNull List trafficStreamKeysBeingHeld) { } - @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx) {} + @Override + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, + IReplayContexts.IChannelKeyContext ctx) { + } }); var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); try (var bais = new ByteArrayInputStream(bytes)) { - try (var trafficSource = new InputStreamOfTraffic(TestContext.noTracking(), bais)) { + try (var trafficSource = new InputStreamOfTraffic(rootContext, bais)) { tr.pullCaptureFromSourceToAccumulator(trafficSource, trafficAccumulator); } } Assertions.assertEquals(1, byteArrays.size()); - Assertions.assertTrue(byteArrays.stream().allMatch(ba->ba.size()==2)); + Assertions.assertTrue(byteArrays.stream().allMatch(ba -> ba.size() == 2)); } @Test public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { - var tr = new TrafficReplayer(TestContext.noTracking(), + var tr = new TrafficReplayer(rootContext, new URI("http://localhost:9200"), null, null, false); List> byteArrays = new ArrayList<>(); var remainingAccumulations = new AtomicInteger(); @@ -233,7 +236,8 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, IReplayContexts.IChannelKeyContext ctx, - @NonNull List trafficStreamKeysBeingHeld) {} + @NonNull List trafficStreamKeysBeingHeld) { + } @Override public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, @@ -241,8 +245,11 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } - @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx) {} + + @Override + public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, + IReplayContexts.IChannelKeyContext ctx) { + } } ); byte[] serializedChunks; @@ -263,13 +270,13 @@ public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channel } try (var bais = new ByteArrayInputStream(serializedChunks)) { - try (var trafficSource = new InputStreamOfTraffic(TestContext.noTracking(), bais)) { + try (var trafficSource = new InputStreamOfTraffic(rootContext, bais)) { tr.pullCaptureFromSourceToAccumulator(trafficSource, 
trafficAccumulator); } } trafficAccumulator.close(); Assertions.assertEquals(2, byteArrays.size()); - Assertions.assertTrue(byteArrays.stream().allMatch(ba->ba.size()==2)); + Assertions.assertTrue(byteArrays.stream().allMatch(ba -> ba.size() == 2)); Assertions.assertEquals(0, remainingAccumulations.get()); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java index bb413221d..421eba8b2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficStreamGenerator.java @@ -211,7 +211,7 @@ private static void fillCommandsAndSizes(Random r, double cancelRequestLikelihoo @SneakyThrows private static TrafficStream[] - fillCommandsAndSizesForSeed(long rSeed, AtomicInteger uniqueIdCounter, + fillCommandsAndSizesForSeed(TestContext rootContext, long rSeed, AtomicInteger uniqueIdCounter, ArrayList commands, ArrayList sizes) { var r2 = new Random(rSeed); @@ -224,7 +224,7 @@ private static void fillCommandsAndSizes(Random r, double cancelRequestLikelihoo var flushLikelihood = Math.pow(r2.nextDouble(),2.0); fillCommandsAndSizes(r2, flushLikelihood/4, flushLikelihood, bufferBound, commands, sizes); return SimpleCapturedTrafficToHttpTransactionAccumulatorTest.makeTrafficStreams(bufferSize, (int) rSeed, - uniqueIdCounter, commands, TestContext.noTracking()); + uniqueIdCounter, commands, rootContext); } /** @@ -269,10 +269,10 @@ public static class StreamAndExpectedSizes { } static StreamAndExpectedSizes - generateStreamAndSumOfItsTransactions(int count, boolean randomize) { + generateStreamAndSumOfItsTransactions(TestContext rootContext, int count, boolean randomize) { var generatedCases = count > 0 ? 
- generateRandomTrafficStreamsAndSizes(IntStream.range(0,count)) : - generateAllIndicativeRandomTrafficStreamsAndSizes(); + generateRandomTrafficStreamsAndSizes(rootContext, IntStream.range(0, count)) : + generateAllIndicativeRandomTrafficStreamsAndSizes(rootContext); var testCaseArr = generatedCases.toArray(RandomTrafficStreamAndTransactionSizes[]::new); log.atInfo().setMessage(()-> "test case array = \n" + Arrays.stream(testCaseArr) @@ -289,12 +289,12 @@ public static class StreamAndExpectedSizes { } public static Stream - generateRandomTrafficStreamsAndSizes(IntStream seedStream) { + generateRandomTrafficStreamsAndSizes(TestContext rootContext, IntStream seedStream) { var uniqueIdCounter = new AtomicInteger(); return seedStream.mapToObj(rSeed->{ var commands = new ArrayList(); var sizes = new ArrayList(); - var trafficStreams = fillCommandsAndSizesForSeed(rSeed, uniqueIdCounter, commands, sizes); + var trafficStreams = fillCommandsAndSizesForSeed(rootContext, rSeed, uniqueIdCounter, commands, sizes); var splitSizes = SimpleCapturedTrafficToHttpTransactionAccumulatorTest.unzipRequestResponseSizes(sizes); return new RandomTrafficStreamAndTransactionSizes(rSeed, trafficStreams, @@ -302,8 +302,9 @@ public static class StreamAndExpectedSizes { }).filter(o->o!=null); } - public static Stream generateAllIndicativeRandomTrafficStreamsAndSizes() { - return generateRandomTrafficStreamsAndSizes( + public static Stream + generateAllIndicativeRandomTrafficStreamsAndSizes(TestContext rootContext) { + return generateRandomTrafficStreamsAndSizes(rootContext, RANDOM_GENERATOR_SEEDS_FOR_SUFFICIENT_TRAFFIC_VARIANCE.stream().mapToInt(i->i)); } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index e2392a0f5..265562f57 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -27,6 +27,7 @@ import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import javax.net.ssl.SSLException; @@ -46,7 +47,7 @@ @Slf4j @WrapWithNettyLeakDetection -public class NettyPacketToHttpConsumerTest { +public class NettyPacketToHttpConsumerTest extends InstrumentationTest { public static final String SERVER_RESPONSE_BODY = "I should be decrypted tester!\n"; @@ -123,7 +124,6 @@ private static SimpleHttpResponse makeContext(HttpFirstLine request) { @ParameterizedTest @ValueSource(booleans = {false, true}) public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Exception { - var ctx = TestContext.noTracking(); for (int i = 0; i < 3; ++i) { var testServer = testServers.get(useTls); var sslContext = !testServer.localhostEndpoint().getScheme().toLowerCase().equals("https") ? 
null : @@ -132,7 +132,7 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except new NioEventLoopGroup(4, new DefaultThreadFactory("test")), testServer.localhostEndpoint(), sslContext, - TestRequestKey.getTestConnectionRequestContext(ctx, 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); nphc.consumeBytes((EXPECTED_REQUEST_STRING).getBytes(StandardCharsets.UTF_8)); var aggregatedResponse = nphc.finalizeRequest().get(); var responseBytePackets = aggregatedResponse.getCopyOfPackets(); @@ -147,11 +147,9 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except @ParameterizedTest @ValueSource(booleans = {false, true}) public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) - throws SSLException, ExecutionException, InterruptedException - { - var rootCtx = TestContext.noTracking(); + throws SSLException, ExecutionException, InterruptedException { var testServer = testServers.get(useTls); - var sslContext = !testServer.localhostEndpoint().getScheme().toLowerCase().equals("https") ? null : + var sslContext = !testServer.localhostEndpoint().getScheme().equalsIgnoreCase("https") ? null : SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); var transformingHttpHandlerFactory = new PacketToTransformingHttpHandlerFactory( new TransformationLoader().getTransformerFactoryLoader(null), null); @@ -161,13 +159,13 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) new RequestSenderOrchestrator( new ClientConnectionPool(testServer.localhostEndpoint(), sslContext, 1)), new TestFlowController(), timeShifter); - for (int j=0; j<2; ++j) { + for (int j = 0; j < 2; ++j) { for (int i = 0; i < 2; ++i) { - var ctx = TestRequestKey.getTestConnectionRequestContext(rootCtx, "TEST_"+i, j); + var ctx = TestRequestKey.getTestConnectionRequestContext(rootContext, "TEST_" + i, j); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, sendingFactory, ctx, Instant.now(), Instant.now(), ctx.getReplayerRequestKey(), - ()->Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); - log.info("requestFinishFuture="+requestFinishFuture); + () -> Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); + log.info("requestFinishFuture=" + requestFinishFuture); var aggregatedResponse = requestFinishFuture.get(); log.debug("Got aggregated response=" + aggregatedResponse); Assertions.assertNull(aggregatedResponse.getError()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java index 1b0c2b3f5..b6157526c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java @@ -8,6 +8,7 @@ import org.opensearch.migrations.replay.TransformationLoader; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.IJsonTransformer; import 
org.opensearch.migrations.transform.JsonCompositeTransformer; @@ -19,10 +20,9 @@ import java.util.Map; @WrapWithNettyLeakDetection -class HttpJsonTransformingConsumerTest { +class HttpJsonTransformingConsumerTest extends InstrumentationTest { @Test public void testPassThroughSinglePacketPost() throws Exception { - var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); @@ -30,7 +30,7 @@ public void testPassThroughSinglePacketPost() throws Exception { new HttpJsonTransformingConsumer(new TransformationLoader() .getTransformerFactoryLoader(null), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(ctx, 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -45,14 +45,13 @@ public void testPassThroughSinglePacketPost() throws Exception { @Test public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws Exception { - var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(ctx, 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -71,14 +70,13 @@ public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws Ex @Test public void testRemoveAuthHeadersWorks() throws Exception { - var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), RemovingAuthTransformerFactory.instance, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(ctx, 0)); + TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/get_withAuthHeader.txt")) { @@ -98,7 +96,6 @@ public void testRemoveAuthHeadersWorks() throws Exception { @Test public void testPartialBodyThrowsAndIsRedriven() throws Exception { - var ctx = TestContext.noTracking(); final var dummyAggregatedResponse = new AggregatedRawResponse(17, null, null, null); var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var complexTransformer = new JsonCompositeTransformer(new IJsonTransformer() { @@ -118,7 +115,7 @@ private void walkMaps(Object o) { }); var transformingHandler = new HttpJsonTransformingConsumer(complexTransformer, null, - testPacketCapture, TestRequestKey.getTestConnectionRequestContext(ctx, 0)); + testPacketCapture, TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); byte[] testBytes; try (var sampleStream 
= HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPollsTest.java similarity index 92% rename from TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java rename to TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPollsTest.java index f3d593184..ab2771053 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPolls.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaCommitsWorkBetweenLongPollsTest.java @@ -5,14 +5,12 @@ import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.Producer; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; @@ -26,7 +24,7 @@ @Slf4j @Testcontainers(disabledWithoutDocker = true) @Tag("requiresDocker") -public class KafkaCommitsWorkBetweenLongPolls { +public class KafkaCommitsWorkBetweenLongPollsTest extends InstrumentationTest { private static final long DEFAULT_POLL_INTERVAL_MS = 1000; private static final int NUM_RUNS = 5; public static final String TEST_TOPIC_NAME = "test-topic"; @@ -48,8 +46,7 @@ private KafkaConsumer buildKafkaConsumer() { @Test @Tag("longTest") public void testThatCommitsAndReadsKeepWorking() throws Exception { - final var rootContext = TestContext.noTracking(); - var kafkaSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), buildKafkaConsumer(), + var kafkaSource = new KafkaTrafficCaptureSource(rootContext, buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS/3)); var blockingSource = new BlockingTrafficSource(kafkaSource, Duration.ofMinutes(5)); var kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java index 9c66b725d..c36a2fec8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java @@ -9,6 +9,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import 
org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; @@ -29,7 +30,7 @@ @Slf4j @Testcontainers(disabledWithoutDocker = true) @Tag("requiresDocker") -public class KafkaKeepAliveTests { +public class KafkaKeepAliveTests extends InstrumentationTest { public static final String TEST_GROUP_CONSUMER_ID = "TEST_GROUP_CONSUMER_ID"; public static final String HEARTBEAT_INTERVAL_MS_KEY = "heartbeat.interval.ms"; public static final long MAX_POLL_INTERVAL_MS = 1000; @@ -55,7 +56,7 @@ public class KafkaKeepAliveTests { * @throws Exception */ @BeforeEach - private void setupTestCase() throws Exception { + private void setupTestCase(TestContext testContext) throws Exception { kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); this.sendCompleteCount = new AtomicInteger(0); KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 0, sendCompleteCount).get(); @@ -69,12 +70,12 @@ private void setupTestCase() throws Exception { kafkaProperties.put(HEARTBEAT_INTERVAL_MS_KEY, HEARTBEAT_INTERVAL_MS+""); kafkaProperties.put("max.poll.records", 1); var kafkaConsumer = new KafkaConsumer(kafkaProperties); - this.kafkaSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), + this.kafkaSource = new KafkaTrafficCaptureSource(testContext, kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); this.trafficSource = new BlockingTrafficSource(kafkaSource, Duration.ZERO); this.keysReceived = new ArrayList<>(); - readNextNStreams(trafficSource, keysReceived, 0, 1); + readNextNStreams(testContext, trafficSource, keysReceived, 0, 1); KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 1, sendCompleteCount); } @@ -100,7 +101,7 @@ public void testTimeoutsDontOccurForSlowPolls() throws Exception { pollIntervalMs, TimeUnit.MILLISECONDS); // wait for 2 messages so that they include the last one produced by the async schedule call previously - readNextNStreams(trafficSource, keysReceived, 1, 2); + readNextNStreams(rootContext, trafficSource, keysReceived, 1, 2); Assertions.assertEquals(3, keysReceived.size()); // At this point, we've read all (3) messages produced , committed the first one // (all the way through to Kafka), and no commits are in-flight yet for the last two messages. @@ -112,7 +113,7 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti for (int i=0; i<2; ++i) { KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 1 + i, sendCompleteCount).get(); } - readNextNStreams(trafficSource, keysReceived, 1, 1); + readNextNStreams(rootContext, trafficSource, keysReceived, 1, 1); trafficSource.commitTrafficStream(keysReceived.get(0)); log.info("Called commitTrafficStream but waiting long enough for the client to leave the group. 
" + @@ -127,7 +128,7 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti log.info("re-establish a client connection so that the following commit will work"); log.atInfo().setMessage(()->"1 ..."+renderNextCommitsAsString()).log(); - readNextNStreams(trafficSource, keysReceived, 0, 1); + readNextNStreams(rootContext, trafficSource, keysReceived, 0, 1); log.atInfo().setMessage(()->"2 ..."+renderNextCommitsAsString()).log(); log.info("wait long enough to fall out of the group again"); @@ -136,16 +137,16 @@ public void testBlockedReadsAndBrokenCommitsDontCauseReordering() throws Excepti var keysReceivedUntilDrop2 = keysReceived; keysReceived = new ArrayList<>(); log.atInfo().setMessage(()->"re-establish... 3 ..."+renderNextCommitsAsString()).log(); - readNextNStreams(trafficSource, keysReceived, 0, 1); + readNextNStreams(rootContext, trafficSource, keysReceived, 0, 1); trafficSource.commitTrafficStream(keysReceivedUntilDrop1.get(1)); log.atInfo().setMessage(()->"re-establish... 4 ..."+renderNextCommitsAsString()).log(); - readNextNStreams(trafficSource, keysReceived, 1, 1); + readNextNStreams(rootContext, trafficSource, keysReceived, 1, 1); log.atInfo().setMessage(()->"5 ..."+renderNextCommitsAsString()).log(); Thread.sleep(2*MAX_POLL_INTERVAL_MS); var keysReceivedUntilDrop3 = keysReceived; keysReceived = new ArrayList<>(); - readNextNStreams(trafficSource, keysReceived, 0, 3); + readNextNStreams(rootContext, trafficSource, keysReceived, 0, 3); log.atInfo().setMessage(()->"6 ..."+kafkaSource.trackingKafkaConsumer.nextCommitsToString()).log(); trafficSource.close(); } @@ -155,10 +156,9 @@ private String renderNextCommitsAsString() { } @SneakyThrows - private static void readNextNStreams(BlockingTrafficSource kafkaSource, List keysReceived, - int from, int count) { + private static void readNextNStreams(TestContext rootContext, BlockingTrafficSource kafkaSource, + List keysReceived, int from, int count) { Assertions.assertEquals(from, keysReceived.size()); - final var rootContext = TestContext.noTracking(); for (int i=0; i{ diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index a283b5e79..b5fd2aecf 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -5,6 +5,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; @@ -20,7 +21,7 @@ @Slf4j @Testcontainers(disabledWithoutDocker = true) @Tag("requiresDocker") -public class KafkaTrafficCaptureSourceLongTermTest { +public class KafkaTrafficCaptureSourceLongTermTest extends InstrumentationTest { public static final int TEST_RECORD_COUNT = 10; public static final String TEST_GROUP_CONSUMER_ID = "TEST_GROUP_CONSUMER_ID"; @@ -36,21 +37,20 @@ public class KafkaTrafficCaptureSourceLongTermTest { @Tag("longTest") public void testTrafficCaptureSource() throws Exception { String testTopicName = "TEST_TOPIC"; - final var 
rootContext = TestContext.noTracking(); var kafkaConsumerProps = KafkaTrafficCaptureSource.buildKafkaProperties(embeddedKafkaBroker.getBootstrapServers(), - TEST_GROUP_CONSUMER_ID, false, null); + TEST_GROUP_CONSUMER_ID, false, null); final long MAX_POLL_MS = 10000; - kafkaConsumerProps.setProperty(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY, MAX_POLL_MS+""); - var kafkaConsumer = new KafkaConsumer(kafkaConsumerProps); - var kafkaTrafficCaptureSource = new KafkaTrafficCaptureSource(TestContext.noTracking(), + kafkaConsumerProps.setProperty(KafkaTrafficCaptureSource.MAX_POLL_INTERVAL_KEY, MAX_POLL_MS + ""); + var kafkaConsumer = new KafkaConsumer(kafkaConsumerProps); + var kafkaTrafficCaptureSource = new KafkaTrafficCaptureSource(rootContext, kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_MS)); var kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); var sendCompleteCount = new AtomicInteger(0); var scheduledIterationsCount = new AtomicInteger(0); var executor = Executors.newSingleThreadScheduledExecutor(); - executor.scheduleAtFixedRate(()->{ + executor.scheduleAtFixedRate(() -> { var i = scheduledIterationsCount.getAndIncrement(); if (i >= TEST_RECORD_COUNT) { executor.shutdown(); @@ -59,27 +59,28 @@ public void testTrafficCaptureSource() throws Exception { } }, 0, PRODUCER_SLEEP_INTERVAL_MS, TimeUnit.MILLISECONDS); - for (int i=0; i { - var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(rootContext::createReadChunkContext) - .get(1, TimeUnit.SECONDS); - if (rogueChunk.isEmpty()) { - // TimeoutExceptions cannot be thrown by the supplier of the CompletableFuture today, BUT we - // could long-poll on the broker for longer than the timeout value supplied in the get() call above - throw new TimeoutException("read actually returned 0 items, but transforming this to a " + - "TimeoutException because either result would be valid."); - } - log.error("rogue chunk: "+ rogueChunk); + Assertions.assertThrows(TimeoutException.class, () -> { + var rogueChunk = kafkaTrafficCaptureSource.readNextTrafficStreamChunk(rootContext::createReadChunkContext) + .get(1, TimeUnit.SECONDS); + if (rogueChunk.isEmpty()) { + // TimeoutExceptions cannot be thrown by the supplier of the CompletableFuture today, BUT we + // could long-poll on the broker for longer than the timeout value supplied in the get() call above + throw new TimeoutException("read actually returned 0 items, but transforming this to a " + + "TimeoutException because either result would be valid."); + } + log.error("rogue chunk: " + rogueChunk); }); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index 3d934d613..5eec9a7a1 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -10,13 +10,11 @@ import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.KafkaConsumerContexts; import org.opensearch.migrations.replay.tracing.ReplayContexts; +import org.opensearch.migrations.tracing.InstrumentationTest; 
import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; @@ -37,13 +35,12 @@ import java.util.function.Supplier; @Slf4j -class KafkaTrafficCaptureSourceTest { +class KafkaTrafficCaptureSourceTest extends InstrumentationTest { public static final int NUM_READ_ITEMS_BOUND = 1000; public static final String TEST_TOPIC_NAME = "TEST_TOPIC_NAME"; @Test public void testRecordToString() { - final var rootContext = TestContext.noTracking(); var ts = TrafficStream.newBuilder() .setConnectionId("c") .setNodeId("n") @@ -58,10 +55,9 @@ public void testRecordToString() { @Test public void testSupplyTrafficFromSource() { - final var rootContext = TestContext.noTracking(); int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); - KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.noTracking(), + KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(rootContext, mockConsumer, TEST_TOPIC_NAME, Duration.ofHours(1)); initializeMockConsumerTopic(mockConsumer); @@ -103,7 +99,7 @@ public void testSupplyTrafficFromSource() { public void testSupplyTrafficWithUnformattedMessages() { int numTrafficStreams = 10; MockConsumer mockConsumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); - KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(TestContext.noTracking(), + KafkaTrafficCaptureSource protobufConsumer = new KafkaTrafficCaptureSource(rootContext, mockConsumer, TEST_TOPIC_NAME, Duration.ofHours(1)); initializeMockConsumerTopic(mockConsumer); @@ -129,7 +125,6 @@ public void testSupplyTrafficWithUnformattedMessages() { // This assertion will fail the test case if not completed within its duration, as would be the case if there // were missing traffic streams. 
Its task currently is limited to the numTrafficStreams where it will stop the stream - final var rootContext = TestContext.noTracking(); var tsCount = new AtomicInteger(); Assertions.assertTimeoutPreemptively(Duration.ofSeconds(1), () -> { while (tsCount.get() < numTrafficStreams) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index 93c751fc0..d4e062cc4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -2,31 +2,26 @@ import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.trace.data.SpanData; -import lombok.Getter; import lombok.Lombok; -import lombok.Setter; -import lombok.SneakyThrows; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.TestContext; import java.time.Duration; import java.time.Instant; import java.util.Arrays; import java.util.List; -import java.util.function.BiConsumer; -import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; public class TracingTest { @Test public void tracingWorks() { - TestContext rootContext = TestContext.withTracking(); + TestContext rootContext = TestContext.withAllTracking(); var tssk = new ISourceTrafficChannelKey.PojoImpl("n", "c"); try (var channelCtx = rootContext.createChannelContext(tssk); var kafkaRecordCtx = @@ -62,6 +57,8 @@ public void tracingWorks() { checkSpans(recordedSpans); checkMetrics(recordedMetrics); + + Assertions.assertTrue(rootContext.contextTracker.getAllRemainingActiveScopes().isEmpty()); } private void checkMetrics(List recordedMetrics) { diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java new file mode 100644 index 000000000..85aa9a065 --- /dev/null +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java @@ -0,0 +1,23 @@ +package org.opensearch.migrations.tracing; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.opensearch.migrations.tracing.TestContext; + +public class InstrumentationTest { + + protected TestContext rootContext; + + protected TestContext makeContext() { return TestContext.noOtelTracking(); } + + @BeforeEach + protected void initializeContext() { + rootContext = makeContext(); + } + + @AfterEach + protected void teardownContext() { + rootContext.close(); + rootContext = null; + } +} diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index efd073f98..efa994f0d 100644 --- 
a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -2,25 +2,43 @@ import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import org.junit.jupiter.api.Assertions; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; -public class TestContext extends RootReplayerContext { +import java.util.stream.Collectors; + +public class TestContext extends RootReplayerContext implements AutoCloseable { public final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; + public final ContextTracker contextTracker = new ContextTracker(); public final ChannelContextManager channelContextManager = new ChannelContextManager(this); - public static TestContext withTracking() { + public static TestContext withTracking(boolean tracing, boolean metrics) { + return new TestContext(new InMemoryInstrumentationBundle(tracing, metrics)); + } + + public static TestContext withAllTracking() { return new TestContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), InMemoryMetricExporter.create())); } - public static TestContext noTracking() { + public static TestContext noOtelTracking() { return new TestContext(new InMemoryInstrumentationBundle(null, null)); } + @Override + public void onContextCreated(IScopedInstrumentationAttributes newScopedContext) { + contextTracker.onCreated(newScopedContext); + } + + @Override + public void onContextClosed(IScopedInstrumentationAttributes newScopedContext) { + contextTracker.onClosed(newScopedContext); + } + public TestContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) { super(inMemoryInstrumentationBundle.openTelemetrySdk); this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; @@ -29,4 +47,10 @@ public TestContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficStreamContextForTest(ITrafficStreamKey tsk) { return createTrafficStreamContextForStreamSource(channelContextManager.retainOrCreateContext(tsk), tsk); } + + @Override + public void close() { +// Assertions.assertEquals("", contextTracker.getAllRemainingActiveScopes().entrySet().stream() +// .map(kvp->kvp.getKey().toString()).collect(Collectors.joining())); + } } From d3ee4f1ed5e62b455610b2570bd79050a35c9cb0 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 19 Jan 2024 00:09:21 -0500 Subject: [PATCH 64/94] Change how MetricInstruments classes are instantiated. I had been pulling activity names from the enclosing class, but in cases of inheritance, that was unmaintainable. That had caused a number of automatically-generated metrics to be missing because subclasses would use the superclass ACTIVITY_NAME definition. Now, all the MetricInstruments constructors are private and the objects are created via factory functions that are alongside the inner class definitions. That moves the naming responsibilities to the actual part of the code that cares and makes the system more foolproof. 
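To illustrate the pattern this message describes, here is a minimal, self-contained sketch (class, field, and metric names are invented for the example, not taken verbatim from the patch): each context class keeps its MetricInstruments constructor private and exposes a makeMetrics factory beside its own ACTIVITY_NAME constant, so the activity name is always chosen at the one place that defines the factory rather than implicitly through an inherited constructor.

import io.opentelemetry.api.metrics.LongCounter;
import io.opentelemetry.api.metrics.Meter;

public class ExampleScopedContexts {

    /** Minimal stand-in for the project's CommonScopedMetricInstruments base class. */
    public static class ScopedInstruments {
        public final LongCounter eventCounter;
        protected ScopedInstruments(Meter meter, String activityName) {
            // one counter per activity, named after the activity that owns it
            eventCounter = meter.counterBuilder(activityName + "Count").build();
        }
    }

    public static class ParentContext {
        public static final String ACTIVITY_NAME = "parentActivity";

        public static class MetricInstruments extends ScopedInstruments {
            // private: instances can only be created through a makeMetrics factory below
            private MetricInstruments(Meter meter, String activityName) {
                super(meter, activityName);
            }
        }

        // the factory sits next to the inner class and pins the parent's activity name
        public static MetricInstruments makeMetrics(Meter meter) {
            return new MetricInstruments(meter, ACTIVITY_NAME);
        }
    }

    public static class ChildContext extends ParentContext {
        public static final String ACTIVITY_NAME = "childActivity";

        // the subclass supplies its own factory, so its instruments are named after
        // childActivity instead of silently reusing the parent's activity name
        public static MetricInstruments makeMetrics(Meter meter) {
            return new MetricInstruments(meter, ACTIVITY_NAME);
        }
    }
}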
Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 11 +- .../TestRootKafkaOffloaderContext.java | 4 +- .../tracing/ConnectionContext.java | 8 +- .../netty/tracing/RootWireLoggingContext.java | 10 +- .../netty/tracing/WireCaptureContexts.java | 42 +++++-- .../proxyserver/RootCaptureContext.java | 2 +- .../replay/tracing/KafkaConsumerContexts.java | 45 ++++++-- .../replay/tracing/ReplayContexts.java | 104 +++++++++++++----- .../replay/tracing/RootReplayerContext.java | 42 +++---- .../replay/tracing/TrafficSourceContexts.java | 24 +++- 10 files changed, 203 insertions(+), 89 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 830c42c2d..a4cc59df4 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -5,6 +5,7 @@ import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; import lombok.Getter; +import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; @@ -37,13 +38,17 @@ public KafkaRecordContext(IRootKafkaOffloaderContext rootScope, IConnectionConte } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override - public MetricInstruments getMetrics() { + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().getKafkaOffloadingInstruments(); } diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java index b5382ce2f..5733d751d 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java @@ -38,7 +38,7 @@ public TestRootKafkaOffloaderContext(InMemoryInstrumentationBundle inMemoryInstr inMemoryInstrumentationBundle.openTelemetrySdk); this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; final var meter = getMeterProvider().get("test"); - this.kafkaOffloadingInstruments = new KafkaRecordContext.MetricInstruments(meter); - this.connectionInstruments = new ConnectionContext.MetricInstruments(meter); + this.kafkaOffloadingInstruments = KafkaRecordContext.makeMetrics(meter); + this.connectionInstruments = ConnectionContext.makeMetrics(meter); } } diff --git 
a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 8d410dc80..8ab31e759 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -42,13 +42,17 @@ public ConnectionContext(IRootOffloaderContext rootInstrumentationScope, String public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongUpDownCounter activeConnectionsCounter; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + protected MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); activeConnectionsCounter = meter.upDownCounterBuilder(ConnectionContext.ACTIVE_CONNECTION) .setUnit("count").build(); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().getConnectionInstruments(); } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java index cbfa15f04..503861b1f 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/RootWireLoggingContext.java @@ -21,11 +21,11 @@ public RootWireLoggingContext(OpenTelemetry openTelemetry) { public RootWireLoggingContext(OpenTelemetry openTelemetry, String scopeName) { super(scopeName, openTelemetry); var meter = this.getMeterProvider().get(scopeName); - connectionInstruments = new WireCaptureContexts.ConnectionContext.MetricInstruments(meter); - requestInstruments = new WireCaptureContexts.RequestContext.MetricInstruments(meter); - blockingInstruments = new WireCaptureContexts.BlockingContext.MetricInstruments(meter); - waitingForResponseInstruments = new WireCaptureContexts.WaitingForResponseContext.MetricInstruments(meter); - responseInstruments = new WireCaptureContexts.ResponseContext.MetricInstruments(meter); + connectionInstruments = WireCaptureContexts.ConnectionContext.makeMetrics(meter); + requestInstruments = WireCaptureContexts.RequestContext.makeMetrics(meter); + blockingInstruments = WireCaptureContexts.BlockingContext.makeMetrics(meter); + waitingForResponseInstruments = WireCaptureContexts.WaitingForResponseContext.makeMetrics(meter); + responseInstruments = WireCaptureContexts.ResponseContext.makeMetrics(meter); } @Override diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java index 2cd7a6dbc..58da58a23 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java +++ 
b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -31,8 +31,8 @@ public static class MetricInstruments public final LongCounter unregisteredCounter; public final LongCounter removedCounter; - public MetricInstruments(Meter meter) { - super(meter); + public MetricInstruments(Meter meter, String activityMeter) { + super(meter, activityMeter); unregisteredCounter = meter .counterBuilder(MetricNames.UNREGISTERED).setUnit(COUNT_UNITS).build(); removedCounter = meter @@ -40,6 +40,10 @@ public MetricInstruments(Meter meter) { } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().getConnectionInstruments(); @@ -130,8 +134,8 @@ public static class MetricInstruments public final LongCounter fullyParsedRequestCounter; public final LongCounter bytesReadCounter; - public MetricInstruments(Meter meter) { - super(meter); + public MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); blockingRequestCounter = meter .counterBuilder(MetricNames.BLOCKING_REQUEST).setUnit(COUNT_UNITS).build(); requestsNotOffloadedCounter = meter @@ -142,6 +146,10 @@ public MetricInstruments(Meter meter) { .counterBuilder(MetricNames.BYTES_READ).setUnit(BYTES_UNIT).build(); } } + + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } @Override public @NonNull MetricInstruments getMetrics() { @@ -185,11 +193,15 @@ public String getActivityName() { return ACTIVITY_NAME; } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override public RequestContext.MetricInstruments getMetrics() { return getRootInstrumentationScope().requestInstruments; @@ -211,11 +223,15 @@ public String getActivityName() { return ACTIVITY_NAME; } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override public RequestContext.MetricInstruments getMetrics() { return getRootInstrumentationScope().requestInstruments; @@ -240,15 +256,19 @@ public String getActivityName() { public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter bytesWritten; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); bytesWritten = meter .counterBuilder(MetricNames.BYTES_WRITTEN).setUnit(BYTES_UNIT).build(); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override - public MetricInstruments getMetrics() { + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().getResponseInstruments(); } diff --git 
a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java index 6a9443eab..9e18fc251 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java @@ -13,6 +13,6 @@ public RootCaptureContext(OpenTelemetry capture) { @Override public KafkaRecordContext.MetricInstruments getKafkaOffloadingInstruments() { var meter = getMeterProvider().get("captureProxy"); - return new KafkaRecordContext.MetricInstruments(meter); + return KafkaRecordContext.makeMetrics(meter); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index b52992262..e32bede93 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -11,6 +11,7 @@ import lombok.NonNull; import lombok.Setter; import org.apache.kafka.common.TopicPartition; +import org.checkerframework.checker.units.qual.N; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; @@ -39,7 +40,7 @@ public static class MetricInstruments { public final LongCounter kafkaPartitionsRevokedCounter; public final LongCounter kafkaPartitionsAssignedCounter; public final LongUpDownCounter kafkaActivePartitionsCounter; - public MetricInstruments(Meter meter) { + private MetricInstruments(Meter meter) { kafkaPartitionsRevokedCounter = meter .counterBuilder(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT).build(); kafkaPartitionsAssignedCounter = meter @@ -49,6 +50,10 @@ public MetricInstruments(Meter meter) { } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter); + } + private @NonNull MetricInstruments getMetrics() { return enclosingScope.asyncListeningInstruments; } @@ -78,8 +83,8 @@ public IKafkaConsumerContexts.IPollScopeContext createNewPollContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } public TouchScopeContext(@NonNull TrafficSourceContexts.BackPressureBlockContext enclosingScope) { @@ -87,6 +92,10 @@ public TouchScopeContext(@NonNull TrafficSourceContexts.BackPressureBlockContext initializeSpan(); } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().touchInstruments; } @@ -96,13 +105,17 @@ public static class PollScopeContext extends BaseNestedSpanContext implements IKafkaConsumerContexts.IPollScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { 
- public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override - public CommonScopedMetricInstruments getMetrics() { + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().pollInstruments; } @@ -123,13 +136,17 @@ public IKafkaConsumerContexts.IKafkaCommitScopeContext createNewKafkaCommitConte } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override - public MetricInstruments getMetrics() { + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().commitInstruments; } @@ -144,13 +161,17 @@ public static class KafkaCommitScopeContext extends DirectNestedSpanContext implements IKafkaConsumerContexts.IKafkaCommitScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + @Override - public MetricInstruments getMetrics() { + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().kafkaCommitInstruments; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 499be5a6c..dd77b38a7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -39,12 +39,16 @@ public ChannelKeyContext(RootReplayerContext rootScope, public static class MetricInstruments extends CommonScopedMetricInstruments { final LongUpDownCounter activeChannelCounter; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); activeChannelCounter = meter .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); } } + + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().channelKeyInstruments; @@ -85,8 +89,8 @@ public IChannelKeyContext getLogicalEnclosingScope() { public static class MetricInstruments extends CommonScopedMetricInstruments { final LongCounter recordCounter; final LongCounter bytesCounter; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); recordCounter = meter.counterBuilder(MetricNames.KAFKA_RECORD_READ) .setUnit("records").build(); bytesCounter = 
meter.counterBuilder(MetricNames.KAFKA_BYTES_READ) @@ -94,6 +98,10 @@ public MetricInstruments(Meter meter) { } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().kafkaRecordInstruments; } @@ -128,13 +136,17 @@ protected TrafficStreamLifecycleContext(RootReplayerContext rootScope, public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter streamsRead; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); streamsRead = meter.counterBuilder(MetricNames.TRAFFIC_STREAMS_READ) .setUnit("objects").build(); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().trafficStreamLifecycleInstruments; } @@ -188,11 +200,15 @@ public IReplayContexts.ITupleHandlingContext createTupleContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().httpTransactionInstruments; } @@ -249,11 +265,15 @@ public RequestAccumulationContext(HttpTransactionContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().requestAccumInstruments; } @@ -268,11 +288,15 @@ public ResponseAccumulationContext(HttpTransactionContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().responseAccumInstruments; } @@ -304,8 +328,8 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter transformBytesOut; private final LongCounter transformChunksOut; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); headerParses = meter.counterBuilder(MetricNames.TRANSFORM_HEADER_PARSE) .setUnit(COUNT_UNIT_STR).build(); payloadParses = meter.counterBuilder(MetricNames.TRANSFORM_PAYLOAD_PARSE_REQUIRED) @@ -342,6 +366,10 @@ public MetricInstruments(Meter meter) { } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new 
MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().transformationInstruments; } @@ -405,12 +433,16 @@ public ScheduledContext(HttpTransactionContext enclosingScope, Instant scheduled public static class MetricInstruments extends CommonScopedMetricInstruments { DoubleHistogram lag; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); lag = meter.histogramBuilder(MetricNames.NETTY_SCHEDULE_LAG).setUnit("ms").build(); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().scheduledInstruments; } @@ -438,8 +470,8 @@ public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter bytesWritten; private final LongCounter bytesRead; - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); sourceTargetGap = meter.histogramBuilder(MetricNames.SOURCE_TO_TARGET_REQUEST_LAG) .setUnit("ms").build(); bytesWritten = meter.counterBuilder(MetricNames.BYTES_WRITTEN_TO_TARGET) @@ -449,6 +481,10 @@ public MetricInstruments(Meter meter) { } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().targetRequestInstruments; } @@ -488,11 +524,15 @@ public RequestSendingContext(TargetRequestContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().requestSendingInstruments; } @@ -507,11 +547,15 @@ public WaitingForHttpResponseContext(TargetRequestContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().waitingForHttpResponseInstruments; } @@ -527,11 +571,15 @@ public ReceivingHttpResponseContext(TargetRequestContext enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().receivingHttpInstruments; } @@ -547,11 +595,15 @@ public TupleHandlingContext(HttpTransactionContext 
enclosingScope) { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().tupleHandlingInstruments; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 4d2306460..e09f63a01 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -40,29 +40,29 @@ public RootReplayerContext(OpenTelemetry sdk) { super(SCOPE_NAME, sdk); var meter = this.getMeterProvider().get(SCOPE_NAME); - asyncListeningInstruments = new KafkaConsumerContexts.AsyncListeningContext.MetricInstruments(meter); - touchInstruments = new KafkaConsumerContexts.TouchScopeContext.MetricInstruments(meter); - pollInstruments = new KafkaConsumerContexts.PollScopeContext.MetricInstruments(meter); - commitInstruments = new KafkaConsumerContexts.CommitScopeContext.MetricInstruments(meter); - kafkaCommitInstruments = new KafkaConsumerContexts.KafkaCommitScopeContext.MetricInstruments(meter); + asyncListeningInstruments = KafkaConsumerContexts.AsyncListeningContext.makeMetrics(meter); + touchInstruments = KafkaConsumerContexts.TouchScopeContext.makeMetrics(meter); + pollInstruments = KafkaConsumerContexts.PollScopeContext.makeMetrics(meter); + commitInstruments = KafkaConsumerContexts.CommitScopeContext.makeMetrics(meter); + kafkaCommitInstruments = KafkaConsumerContexts.KafkaCommitScopeContext.makeMetrics(meter); - readChunkInstruments = new TrafficSourceContexts.ReadChunkContext.MetricInstruments(meter); - backPressureInstruments = new TrafficSourceContexts.BackPressureBlockContext.MetricInstruments(meter); - waitForNextSignalInstruments = new TrafficSourceContexts.WaitForNextSignal.MetricInstruments(meter); + readChunkInstruments = TrafficSourceContexts.ReadChunkContext.makeMetrics(meter); + backPressureInstruments = TrafficSourceContexts.BackPressureBlockContext.makeMetrics(meter); + waitForNextSignalInstruments = TrafficSourceContexts.WaitForNextSignal.makeMetrics(meter); - channelKeyInstruments = new ReplayContexts.ChannelKeyContext.MetricInstruments(meter); - kafkaRecordInstruments = new ReplayContexts.KafkaRecordContext.MetricInstruments(meter); - trafficStreamLifecycleInstruments = new ReplayContexts.TrafficStreamLifecycleContext.MetricInstruments(meter); - httpTransactionInstruments = new ReplayContexts.HttpTransactionContext.MetricInstruments(meter); - requestAccumInstruments = new ReplayContexts.RequestAccumulationContext.MetricInstruments(meter); - responseAccumInstruments = new ReplayContexts.ResponseAccumulationContext.MetricInstruments(meter); - transformationInstruments = new ReplayContexts.RequestTransformationContext.MetricInstruments(meter); - scheduledInstruments = new ReplayContexts.ScheduledContext.MetricInstruments(meter); - targetRequestInstruments = new ReplayContexts.TargetRequestContext.MetricInstruments(meter); - requestSendingInstruments 
= new ReplayContexts.RequestSendingContext.MetricInstruments(meter); - waitingForHttpResponseInstruments = new ReplayContexts.WaitingForHttpResponseContext.MetricInstruments(meter); - receivingHttpInstruments = new ReplayContexts.ReceivingHttpResponseContext.MetricInstruments(meter); - tupleHandlingInstruments = new ReplayContexts.TupleHandlingContext.MetricInstruments(meter); + channelKeyInstruments = ReplayContexts.ChannelKeyContext.makeMetrics(meter); + kafkaRecordInstruments = ReplayContexts.KafkaRecordContext.makeMetrics(meter); + trafficStreamLifecycleInstruments = ReplayContexts.TrafficStreamLifecycleContext.makeMetrics(meter); + httpTransactionInstruments = ReplayContexts.HttpTransactionContext.makeMetrics(meter); + requestAccumInstruments = ReplayContexts.RequestAccumulationContext.makeMetrics(meter); + responseAccumInstruments = ReplayContexts.ResponseAccumulationContext.makeMetrics(meter); + transformationInstruments = ReplayContexts.RequestTransformationContext.makeMetrics(meter); + scheduledInstruments = ReplayContexts.ScheduledContext.makeMetrics(meter); + targetRequestInstruments = ReplayContexts.TargetRequestContext.makeMetrics(meter); + requestSendingInstruments = ReplayContexts.RequestSendingContext.makeMetrics(meter); + waitingForHttpResponseInstruments = ReplayContexts.WaitingForHttpResponseContext.makeMetrics(meter); + receivingHttpInstruments = ReplayContexts.ReceivingHttpResponseContext.makeMetrics(meter); + tupleHandlingInstruments = ReplayContexts.TupleHandlingContext.makeMetrics(meter); } @Override diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index d5e9c5f7b..fbc1c3d0c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -32,10 +32,14 @@ public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().readChunkInstruments; } @@ -67,10 +71,14 @@ public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().backPressureInstruments; } @@ -86,10 +94,14 @@ public static class WaitForNextSignal extends BaseNestedSpanContext implements ITrafficSourceContexts.IWaitForNextSignal { public static class MetricInstruments extends CommonScopedMetricInstruments { - public MetricInstruments(Meter meter) { - super(meter, ACTIVITY_NAME); + private 
MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + public @NonNull MetricInstruments getMetrics() { return getRootInstrumentationScope().waitForNextSignalInstruments; } From 54c5e27347b4e36218fda29d6d74b6befd89a1fd Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 19 Jan 2024 18:17:12 -0500 Subject: [PATCH 65/94] Build fix - When refactoring to use TestContexts more globally, a test setup method was corrupted. Signed-off-by: Greg Schohn --- .../migrations/replay/kafka/KafkaKeepAliveTests.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java index c36a2fec8..eea297cb9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java @@ -56,7 +56,7 @@ public class KafkaKeepAliveTests extends InstrumentationTest { * @throws Exception */ @BeforeEach - private void setupTestCase(TestContext testContext) throws Exception { + private void setupTestCase() throws Exception { kafkaProducer = KafkaTestUtils.buildKafkaProducer(embeddedKafkaBroker.getBootstrapServers()); this.sendCompleteCount = new AtomicInteger(0); KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 0, sendCompleteCount).get(); @@ -70,12 +70,12 @@ private void setupTestCase(TestContext testContext) throws Exception { kafkaProperties.put(HEARTBEAT_INTERVAL_MS_KEY, HEARTBEAT_INTERVAL_MS+""); kafkaProperties.put("max.poll.records", 1); var kafkaConsumer = new KafkaConsumer(kafkaProperties); - this.kafkaSource = new KafkaTrafficCaptureSource(testContext, + this.kafkaSource = new KafkaTrafficCaptureSource(rootContext, kafkaConsumer, testTopicName, Duration.ofMillis(MAX_POLL_INTERVAL_MS)); this.trafficSource = new BlockingTrafficSource(kafkaSource, Duration.ZERO); this.keysReceived = new ArrayList<>(); - readNextNStreams(testContext, trafficSource, keysReceived, 0, 1); + readNextNStreams(rootContext, trafficSource, keysReceived, 0, 1); KafkaTestUtils.produceKafkaRecord(testTopicName, kafkaProducer, 1, sendCompleteCount); } From 0ce6694dca25a860163ae72f7ad4c14f1cdb0512 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 19 Jan 2024 18:23:33 -0500 Subject: [PATCH 66/94] Spin up a grafana container in the docker solution with simple credentials. 
Signed-off-by: Greg Schohn
---
 .../src/main/docker/docker-compose.yml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml
index ad97dafc9..cdf76ecdc 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml
+++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml
@@ -27,6 +27,19 @@ services:
     environment:
       - COLLECTOR_OTLP_ENABLED=true

+  grafana:
+    image: grafana/grafana:latest
+    networks:
+      - migrations
+    ports:
+      - "3000:3000"
+    volumes:
+      - grafana_data:/var/lib/grafana
+    environment:
+      - GF_SECURITY_ADMIN_PASSWORD=admin
+    depends_on:
+      - prometheus
+
   # Collector
   otel-collector:
     image: otel/opentelemetry-collector:latest
@@ -155,6 +168,8 @@ volumes:
     driver: local
   sharedReplayerOutput:
     driver: local
+  grafana_data:
+    driver: local

 networks:
   migrations:

From c8674ebcfa524c110847f6838e7f348451c53f7f Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Sun, 21 Jan 2024 10:32:40 -0500
Subject: [PATCH 67/94] Start to get Source/Target comparison metrics in place,
 plus more refactoring to rely upon values within contexts rather than passing
 redundant copies of things like request/channel keys.

Emitting comparison metrics as log lines is being phased out, replaced by metric and
span attributes that pick up the values directly (letting downstream dashboards add
any additional logic to determine whether values matched, though statusMatch does
still exist as a metric, only because it's quick and easy on both ends).

All tests pass, but I haven't done any testing with the dockerSolution. I need to
re-enable the Kafka container test so that I can continually validate that it works,
but as of now it throws an OOM error, which needs to be investigated.
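As a rough sketch of that attribute-based approach (using only the stock OpenTelemetry Java API; the metric and attribute names below are placeholders, not the ones this patch introduces), a tuple's comparison details can be attached directly to a counter measurement so dashboards can aggregate matches and mismatches without parsing log output:

import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.LongCounter;

public class StatusComparisonRecorder {
    // placeholder attribute names for the source/target comparison
    private static final AttributeKey<Long> SOURCE_STATUS = AttributeKey.longKey("sourceStatusCode");
    private static final AttributeKey<Long> TARGET_STATUS = AttributeKey.longKey("targetStatusCode");
    private static final AttributeKey<Boolean> STATUS_MATCH = AttributeKey.booleanKey("statusCodesMatch");

    private final LongCounter tupleCounter;

    public StatusComparisonRecorder(OpenTelemetry openTelemetry) {
        // one counter for handled tuples; the comparison rides along as attributes
        tupleCounter = openTelemetry.getMeter("replayer")
                .counterBuilder("tupleComparison").setUnit("count").build();
    }

    public void onTupleHandled(int sourceStatus, int targetStatus) {
        var attrs = Attributes.builder()
                .put(SOURCE_STATUS, (long) sourceStatus)
                .put(TARGET_STATUS, (long) targetStatus)
                .put(STATUS_MATCH, sourceStatus == targetStatus)
                .build();
        tupleCounter.add(1, attrs);
    }
}

The same attributes could also be set on the enclosing tuple-handling span, keeping traces and metrics consistent with each other.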
Signed-off-by: Greg Schohn --- .../tracing/BaseNestedSpanContext.java | 7 ++ .../tracing/IInstrumentationAttributes.java | 55 ++++++++++-- .../tracing/IWithStartTimeAndAttributes.java | 21 ++--- .../replay/AddCompressionEncodingTest.java | 2 +- .../replay/AccumulationCallbacks.java | 15 ++-- ...edTrafficToHttpTransactionAccumulator.java | 10 +-- .../replay/PacketConsumerFactory.java | 3 +- ...acketToTransformingHttpHandlerFactory.java | 2 +- .../replay/ParsedHttpMessagesAsDicts.java | 84 ++++++++----------- .../migrations/replay/ReplayEngine.java | 6 +- .../replay/ResultsToLogsConsumer.java | 4 +- .../replay/SourceTargetCaptureTuple.java | 17 ++-- .../migrations/replay/TrafficReplayer.java | 83 +++++++++--------- .../replay/TupleParserChainConsumer.java | 7 +- .../replay/tracing/IReplayContexts.java | 24 +++++- .../replay/tracing/ReplayContexts.java | 68 +++++++++++++++ .../FullReplayerWithTracingChecksTest.java | 6 +- .../replay/FullTrafficReplayerTest.java | 7 +- .../replay/HeaderTransformerTest.java | 7 +- .../KafkaRestartingTrafficReplayerTest.java | 4 +- .../replay/ParsedHttpMessagesAsDictsTest.java | 70 ++-------------- .../replay/RequestSenderOrchestratorTest.java | 5 +- .../replay/ResultsToLogsConsumerTest.java | 66 ++++++++------- ...afficToHttpTransactionAccumulatorTest.java | 19 ++--- .../replay/TrafficReplayerRunner.java | 3 +- .../replay/TrafficReplayerTest.java | 36 ++++---- .../NettyPacketToHttpConsumerTest.java | 8 +- .../HttpJsonTransformingConsumerTest.java | 10 +-- .../replay/datatypes/MockMetricsBuilder.java | 24 ------ .../migrations/replay/TestRequestKey.java | 31 ------- .../migrations/replay/TestUtils.java | 2 +- .../migrations/tracing/TestContext.java | 41 +++++++-- 32 files changed, 388 insertions(+), 359 deletions(-) delete mode 100644 TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datatypes/MockMetricsBuilder.java delete mode 100644 TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index d36d20887..cbe9e5a29 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.tracing; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.trace.Span; @@ -8,6 +9,7 @@ import lombok.Setter; import java.time.Instant; +import java.util.Optional; import java.util.stream.Stream; public abstract class BaseNestedSpanContext @@ -20,6 +22,11 @@ public abstract class BaseNestedSpanContext @Getter @Setter Exception observedExceptionToIncludeInMetrics; + protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder attributesBuilder, + AttributeKey key, Optional value) { + return value.map(v -> attributesBuilder.put(key, v)).orElse(attributesBuilder); + } + protected BaseNestedSpanContext(S rootScope, T enclosingScope) { rootScope.onContextCreated(this); this.enclosingScope = enclosingScope; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 6c84a130a..220bfe3ae 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -3,17 +3,24 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; import lombok.NonNull; +import org.opensearch.migrations.coreutils.MetricsAttributeKey; +import org.opensearch.migrations.coreutils.MetricsLogBuilder; +import java.time.Duration; import java.util.ArrayList; +import java.util.Optional; public interface IInstrumentationAttributes { AttributeKey HAD_EXCEPTION_KEY = AttributeKey.booleanKey("hadException"); IInstrumentationAttributes getEnclosingScope(); + default Span getCurrentSpan() { return null; } default AttributesBuilder fillAttributes(AttributesBuilder builder) { @@ -23,10 +30,9 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { Exception getObservedExceptionToIncludeInMetrics(); void setObservedExceptionToIncludeInMetrics(Exception e); - default @NonNull Attributes getPopulatedMetricAttributes() { + default @NonNull Attributes getPopulatedMetricAttributes(AttributesBuilder attributesBuilder) { final var e = getObservedExceptionToIncludeInMetrics(); - var b = Attributes.builder(); - return e == null ? b.build() : b.put(HAD_EXCEPTION_KEY, true).build(); + return e == null ? 
attributesBuilder.build() : attributesBuilder.put(HAD_EXCEPTION_KEY, true).build(); } default Attributes getPopulatedSpanAttributes() { @@ -54,15 +60,54 @@ default AttributesBuilder getPopulatedSpanAttributesBuilder(AttributesBuilder bu default void meterIncrementEvent(LongCounter c) { meterIncrementEvent(c, 1); } + default void meterIncrementEvent(LongCounter c, long increment) { + meterIncrementEvent(c, increment, Attributes.builder()); + } + + default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - c.add(increment, getPopulatedMetricAttributes()); + c.add(increment, getPopulatedMetricAttributes(attributesBuilder)); } } + default void meterDeltaEvent(LongUpDownCounter c, long delta) { + meterDeltaEvent(c, delta, Attributes.builder()); + } + + default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { try (var scope = new NullableExemplarScope(getCurrentSpan())) { - var attributes = getPopulatedMetricAttributes(); + var attributes = getPopulatedMetricAttributes(attributesBuilder); c.add(delta, attributes); } } + + default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { + meterHistogram(histogram, value.toNanos()/1_000_000.0); + } + + default void meterHistogramMillis(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { + meterHistogram(histogram, value.toNanos()/1_000_000.0, attributesBuilder); + } + + default void meterHistogram(DoubleHistogram histogram, double value) { + meterHistogram(histogram, value, Attributes.builder()); + } + + default void meterHistogram(DoubleHistogram histogram, double value, AttributesBuilder attributesBuilder) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + histogram.record(value, getPopulatedMetricAttributes(attributesBuilder)); + } + } + + default void meterHistogram(LongHistogram histogram, long value) { + meterHistogram(histogram, value, Attributes.builder()); + } + + default void meterHistogram(LongHistogram histogram, long value, AttributesBuilder attributesBuilder) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + histogram.record(value, getPopulatedMetricAttributes(attributesBuilder)); + } + } + } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index af5181e8c..75f90f0d6 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -10,20 +10,15 @@ public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { Instant getStartTime(); - default void meterHistogramMillis(DoubleHistogram histogram) { - meterHistogramMillis(histogram, Duration.between(getStartTime(), Instant.now())); - } - default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { - meterHistogram(histogram, value.toNanos()/1_000_000.0); + default Duration getSpanDuration() { + return Duration.between(getStartTime(), Instant.now()); } - default void meterHistogram(DoubleHistogram histogram, double value) { - try (var scope = new NullableExemplarScope(getCurrentSpan())) { - histogram.record(value, getPopulatedMetricAttributes()); - } + + default 
void meterHistogramMillis(DoubleHistogram histogram) { + meterHistogramMillis(histogram, getSpanDuration()); } - default void meterHistogram(LongHistogram histogram, long value) { - try (var scope = new NullableExemplarScope(getCurrentSpan())) { - histogram.record(value, getPopulatedMetricAttributes()); - } + + default void meterHistogramMillis(DoubleHistogram histogram, AttributesBuilder attributesBuilder) { + meterHistogramMillis(histogram, getSpanDuration(), attributesBuilder); } } diff --git a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java index 6ffafa521..4eb408162 100644 --- a/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java +++ b/TrafficCapture/replayerPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/java/org/opensearch/migrations/replay/AddCompressionEncodingTest.java @@ -35,7 +35,7 @@ public void addingCompressionRequestHeaderCompressesPayload() throws ExecutionEx JsonJoltTransformer.newBuilder() .addCannedOperation(JsonJoltTransformBuilder.CANNED_OPERATION.ADD_GZIP) .build(), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); final var payloadPartSize = 511; final var numParts = 1025; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index 8214ec3d9..c964ff1ca 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -10,20 +10,17 @@ import java.util.List; public interface AccumulationCallbacks { - void onRequestReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + void onRequestReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request); - void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + void onFullDataReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrpp); void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + @NonNull IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); - void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IReplayContexts.IChannelKeyContext ctx, + void onConnectionClose(int channelInteractionNumber, + @NonNull IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); - void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx); + void onTrafficStreamIgnored(@NonNull IReplayContexts.ITrafficStreamsLifecycleContext ctx); } diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index 09c29baa6..a62195170 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -105,14 +105,13 @@ private static class SpanWrappingAccumulationCallbacks { public void onRequestReceived(IReplayContexts.IRequestAccumulationContext requestCtx, @NonNull HttpMessageAndTimestamp request) { requestCtx.close(); - underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope().getReplayerRequestKey(), - requestCtx.getLogicalEnclosingScope(), request); + underlying.onRequestReceived(requestCtx.getLogicalEnclosingScope(), request); } public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, @NonNull RequestResponsePacketPair rrpp) { rrpp.getResponseContext().close(); - underlying.onFullDataReceived(key, rrpp.getHttpTransactionContext(), rrpp); + underlying.onFullDataReceived(rrpp.getHttpTransactionContext(), rrpp); } public void onConnectionClose(@NonNull Accumulation accum, @@ -120,8 +119,7 @@ public void onConnectionClose(@NonNull Accumulation accum, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { var tsCtx = accum.trafficChannelKey.getTrafficStreamsContext(); - underlying.onConnectionClose(accum.trafficChannelKey, - accum.numberOfResets.get(), tsCtx.getLogicalEnclosingScope(), + underlying.onConnectionClose(accum.numberOfResets.get(), tsCtx.getLogicalEnclosingScope(), status, when, trafficStreamKeysBeingHeld); } @@ -133,7 +131,7 @@ public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStat public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk) { var tsCtx = tsk.getTrafficStreamsContext(); - underlying.onTrafficStreamIgnored(tsk, tsk.getTrafficStreamsContext().getLogicalEnclosingScope()); + underlying.onTrafficStreamIgnored(tsk.getTrafficStreamsContext()); } }; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java index 0658ce350..74cbdadec 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java @@ -5,6 +5,5 @@ import org.opensearch.migrations.replay.tracing.IReplayContexts; public interface PacketConsumerFactory { - IPacketFinalizingConsumer create(UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext context); + IPacketFinalizingConsumer create(IReplayContexts.IReplayerHttpTransactionContext context); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index 140f7b5bd..ef3cc42d2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -27,7 +27,7 @@ public PacketToTransformingHttpHandlerFactory(IJsonTransformer jsonTransformer, @Override public IPacketFinalizingConsumer> - create(UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { + create(IReplayContexts.IReplayerHttpTransactionContext httpTransactionContext) { log.trace("creating HttpJsonTransformingConsumer"); return new HttpJsonTransformingConsumer<>(jsonTransformer, authTransformerFactory, new TransformedPacketReceiver(), httpTransactionContext); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java index 8f690d8dc..ee29dee8f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java @@ -3,14 +3,13 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.HttpHeaders; +import io.opentelemetry.api.common.AttributesBuilder; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.coreutils.MetricsAttributeKey; -import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogBuilder; -import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueSourceRequestKey; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import java.time.Duration; import java.util.Base64; @@ -40,14 +39,16 @@ public class ParsedHttpMessagesAsDicts { public final Optional> sourceResponseOp; public final Optional> targetRequestOp; public final Optional> targetResponseOp; + public final IReplayContexts.ITupleHandlingContext context; - public ParsedHttpMessagesAsDicts(SourceTargetCaptureTuple tuple) { + public ParsedHttpMessagesAsDicts(@NonNull SourceTargetCaptureTuple tuple) { this(tuple, Optional.ofNullable(tuple.sourcePair)); } - protected ParsedHttpMessagesAsDicts(SourceTargetCaptureTuple tuple, + protected ParsedHttpMessagesAsDicts(@NonNull SourceTargetCaptureTuple tuple, Optional sourcePairOp) { - this(getSourceRequestOp(tuple.uniqueRequestKey, sourcePairOp), + this(tuple.context, + getSourceRequestOp(tuple.context, sourcePairOp), getSourceResponseOp(tuple, sourcePairOp), getTargetRequestOp(tuple), getTargetResponseOp(tuple)); @@ -56,69 +57,53 @@ protected ParsedHttpMessagesAsDicts(SourceTargetCaptureTuple tuple, private static Optional> getTargetResponseOp(SourceTargetCaptureTuple tuple) { return Optional.ofNullable(tuple.targetResponseData) .filter(r -> !r.isEmpty()) - .map(d -> convertResponse(tuple.uniqueRequestKey, d, tuple.targetResponseDuration)); + .map(d -> convertResponse(tuple.context, d, tuple.targetResponseDuration)); } private static Optional> getTargetRequestOp(SourceTargetCaptureTuple tuple) { return Optional.ofNullable(tuple.targetRequestData) - .map(d -> d.asByteArrayStream()) - .map(d -> convertRequest(tuple.uniqueRequestKey, d.collect(Collectors.toList()))); + .map(TransformedPackets::asByteArrayStream) + 
.map(d -> convertRequest(tuple.context, d.collect(Collectors.toList()))); } private static Optional> getSourceResponseOp(SourceTargetCaptureTuple tuple, Optional sourcePairOp) { return sourcePairOp.flatMap(p -> Optional.ofNullable(p.responseData).flatMap(d -> Optional.ofNullable(d.packetBytes)) - .map(d -> convertResponse(tuple.uniqueRequestKey, d, + .map(d -> convertResponse(tuple.context, d, // TODO: These durations are not measuring the same values! Duration.between(tuple.sourcePair.requestData.getLastPacketTimestamp(), tuple.sourcePair.responseData.getLastPacketTimestamp())))); } - private static Optional> getSourceRequestOp(@NonNull UniqueSourceRequestKey diagnosticKey, - Optional sourcePairOp) { + private static Optional> + getSourceRequestOp(@NonNull IReplayContexts.ITupleHandlingContext context, + Optional sourcePairOp) { return sourcePairOp.flatMap(p -> Optional.ofNullable(p.requestData).flatMap(d -> Optional.ofNullable(d.packetBytes)) - .map(d -> convertRequest(diagnosticKey, d))); + .map(d -> convertRequest(context, d))); } - public ParsedHttpMessagesAsDicts(Optional> sourceRequestOp1, + public ParsedHttpMessagesAsDicts(IReplayContexts.ITupleHandlingContext context, + Optional> sourceRequestOp1, Optional> sourceResponseOp2, Optional> targetRequestOp3, Optional> targetResponseOp4) { + this.context = context; this.sourceRequestOp = sourceRequestOp1; this.sourceResponseOp = sourceResponseOp2; this.targetRequestOp = targetRequestOp3; this.targetResponseOp = targetResponseOp4; + fillStatusCodeMetrics(context, sourceResponseOp, targetResponseOp); } - public MetricsLogBuilder buildStatusCodeMetrics(MetricsLogger logger, UniqueSourceRequestKey requestKey) { - var builder = logger.atSuccess(MetricsEvent.STATUS_CODE_COMPARISON); - return buildStatusCodeMetrics(builder, requestKey); - } - - public MetricsLogBuilder buildStatusCodeMetrics(MetricsLogBuilder logBuilder, UniqueSourceRequestKey requestKey) { - return buildStatusCodeMetrics(logBuilder, requestKey, sourceResponseOp, targetResponseOp); - } - - public static MetricsLogBuilder buildStatusCodeMetrics(MetricsLogBuilder builder, - UniqueSourceRequestKey requestKey, - Optional> sourceResponseOp, - Optional> targetResponseOp) { - var sourceStatus = sourceResponseOp.map(r -> r.get(STATUS_CODE_KEY)); - var targetStatus = targetResponseOp.map(r -> r.get(STATUS_CODE_KEY)); - builder = builder.setAttribute(MetricsAttributeKey.REQUEST_ID, - requestKey.getTrafficStreamKey().getConnectionId() + "." 
+ requestKey.getSourceRequestIndex()); - builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.SOURCE_HTTP_STATUS, sourceStatus); - builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.TARGET_HTTP_STATUS, targetStatus); - builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.HTTP_STATUS_MATCH, - sourceStatus.flatMap(ss -> targetStatus.map(ts -> ss.equals(ts))) - .filter(x -> x).map(b -> (Object) 1).or(() -> Optional.of(Integer.valueOf(0)))); - builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.HTTP_METHOD, - sourceResponseOp.map(r -> r.get("Method"))); - builder = MetricsLogBuilder.addMetricIfPresent(builder, MetricsAttributeKey.HTTP_ENDPOINT, - sourceResponseOp.map(r -> r.get("Request-URI"))); - return builder; + public static void fillStatusCodeMetrics(@NonNull IReplayContexts.ITupleHandlingContext context, + Optional> sourceResponseOp, + Optional> targetResponseOp) { + sourceResponseOp.ifPresent(r -> context.setMethod((String)r.get("Method"))); + sourceResponseOp.ifPresent(r -> context.setEndpoint((String)r.get("Request-URI"))); + sourceResponseOp.ifPresent(r -> context.setSourceStatus((Integer) r.get(STATUS_CODE_KEY))); + targetResponseOp.ifPresent(r -> context.setTargetStatus((Integer) r.get(STATUS_CODE_KEY))); } @@ -145,7 +130,7 @@ private static Map fillMap(LinkedHashMap map, return map; } - private static Map makeSafeMap(@NonNull UniqueSourceRequestKey diagnosticKey, + private static Map makeSafeMap(@NonNull IReplayContexts.ITupleHandlingContext context, Callable> c) { try { return c.call(); @@ -153,26 +138,29 @@ private static Map makeSafeMap(@NonNull UniqueSourceRequestKey d // TODO - this isn't a good design choice. // We should follow through with the spirit of this class and leave this as empty optional values log.atWarn().setMessage(()->"Putting what may be a bogus value in the output because transforming it " + - "into json threw an exception for "+diagnosticKey.toString()).setCause(e).log(); + "into json threw an exception for "+context).setCause(e).log(); return Map.of("Exception", (Object) e.toString()); } } - private static Map convertRequest(@NonNull UniqueSourceRequestKey diagnosticKey, + private static Map convertRequest(@NonNull IReplayContexts.ITupleHandlingContext context, @NonNull List data) { - return makeSafeMap(diagnosticKey, () -> { + return makeSafeMap(context, () -> { var map = new LinkedHashMap(); var message = HttpByteBufFormatter.parseHttpRequestFromBufs(byteToByteBufStream(data), true); map.put("Request-URI", message.uri()); map.put("Method", message.method().toString()); map.put("HTTP-Version", message.protocolVersion().toString()); + context.setMethod(message.method().toString()); + context.setEndpoint(message.uri()); + context.setHttpVersions(message.protocolVersion().toString()); return fillMap(map, message.headers(), message.content()); }); } - private static Map convertResponse(@NonNull UniqueSourceRequestKey diagnosticKey, + private static Map convertResponse(@NonNull IReplayContexts.ITupleHandlingContext context, @NonNull List data, Duration latency) { - return makeSafeMap(diagnosticKey, () -> { + return makeSafeMap(context, () -> { var map = new LinkedHashMap(); var message = HttpByteBufFormatter.parseHttpResponseFromBufs(byteToByteBufStream(data), true); map.put("HTTP-Version", message.protocolVersion()); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index c3712409b..59e29a1cb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -135,7 +135,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - scheduleRequest(UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext ctx, + scheduleRequest(IReplayContexts.IReplayerHttpTransactionContext ctx, Instant originalStart, Instant originalEnd, int numPackets, Stream packets) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); @@ -143,6 +143,7 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant var start = timeShifter.transformSourceTimeToRealTime(originalStart); var end = timeShifter.transformSourceTimeToRealTime(originalEnd); var interval = numPackets > 1 ? Duration.between(start, end).dividedBy(numPackets-1L) : Duration.ZERO; + var requestKey = ctx.getReplayerRequestKey(); logStartOfWork(requestKey, newCount, start, label); metricsLogger.atSuccess(MetricsEvent.SCHEDULED_REQUEST_TO_BE_SENT) .setAttribute(MetricsAttributeKey.REQUEST_ID, requestKey.toString()) @@ -154,11 +155,12 @@ private static void logStartOfWork(Object stringableKey, long newCount, Instant } public DiagnosticTrackableCompletableFuture - closeConnection(ISourceTrafficChannelKey channelKey, int channelInteractionNum, + closeConnection(int channelInteractionNum, IReplayContexts.IChannelKeyContext ctx, Instant timestamp) { var newCount = totalCountOfScheduledTasksOutstanding.incrementAndGet(); final String label = "close"; var atTime = timeShifter.transformSourceTimeToRealTime(timestamp); + var channelKey = ctx.getChannelKey(); logStartOfWork(new IndexedChannelInteraction(channelKey, channelInteractionNum), newCount, atTime, label); var future = networkSendOrchestrator.scheduleClose(ctx, channelInteractionNum, atTime); return hookWorkFinishingUpdates(future, timestamp, channelKey, label); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ResultsToLogsConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ResultsToLogsConsumer.java index 250bf21dd..a38bf273f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ResultsToLogsConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ResultsToLogsConsumer.java @@ -42,7 +42,7 @@ private Map toJSONObject(SourceTargetCaptureTuple tuple, ParsedH parsed.targetRequestOp.ifPresent(r -> tupleMap.put("targetRequest", r)); parsed.targetResponseOp.ifPresent(r -> tupleMap.put("targetResponse", r)); - tupleMap.put("connectionId", formatUniqueRequestKey(tuple.uniqueRequestKey)); + tupleMap.put("connectionId", formatUniqueRequestKey(tuple.getRequestKey())); Optional.ofNullable(tuple.errorCause).ifPresent(e -> tupleMap.put("error", e.toString())); return tupleMap; @@ -127,7 +127,7 @@ public static String toTransactionSummaryString(SourceTargetCaptureTuple tuple, // TARGET_LATENCY .add(t.map(r->""+r.get(ParsedHttpMessagesAsDicts.RESPONSE_TIME_MS_KEY)).orElse(MISSING_STR)) // REQUEST_ID - .add(formatUniqueRequestKey(tuple.uniqueRequestKey)) + .add(formatUniqueRequestKey(tuple.getRequestKey())) .toString(); } diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java index 98fd89fda..f4425648f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java @@ -4,7 +4,10 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedPackets; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.datatypes.UniqueSourceRequestKey; +import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import java.time.Duration; import java.util.List; @@ -13,22 +16,22 @@ @Slf4j public class SourceTargetCaptureTuple implements AutoCloseable { - final UniqueSourceRequestKey uniqueRequestKey; final RequestResponsePacketPair sourcePair; final TransformedPackets targetRequestData; final List targetResponseData; final HttpRequestTransformationStatus transformationStatus; final Throwable errorCause; Duration targetResponseDuration; + final IReplayContexts.ITupleHandlingContext context; - public SourceTargetCaptureTuple(@NonNull UniqueSourceRequestKey uniqueRequestKey, + public SourceTargetCaptureTuple(@NonNull IReplayContexts.ITupleHandlingContext tupleHandlingContext, RequestResponsePacketPair sourcePair, TransformedPackets targetRequestData, List targetResponseData, HttpRequestTransformationStatus transformationStatus, Throwable errorCause, Duration targetResponseDuration) { - this.uniqueRequestKey = uniqueRequestKey; + this.context = tupleHandlingContext; this.sourcePair = sourcePair; this.targetRequestData = targetRequestData; this.targetResponseData = targetResponseData; @@ -39,14 +42,14 @@ public SourceTargetCaptureTuple(@NonNull UniqueSourceRequestKey uniqueRequestKey @Override public void close() { - Optional.ofNullable(targetRequestData).ifPresent(d->d.close()); + Optional.ofNullable(targetRequestData).ifPresent(TransformedPackets::close); } @Override public String toString() { return HttpByteBufFormatter.setPrintStyleFor(HttpByteBufFormatter.PacketPrintFormat.TRUNCATED, () -> { final StringJoiner sj = new StringJoiner("\n ", "SourceTargetCaptureTuple{","}"); - sj.add("diagnosticLabel=").add(uniqueRequestKey.toString()); + sj.add("diagnosticLabel=").add(context.toString()); if (sourcePair != null) { sj.add("sourcePair=").add(sourcePair.toString()); } if (targetResponseDuration != null) { sj.add("targetResponseDuration=").add(targetResponseDuration+""); } Optional.ofNullable(targetRequestData).ifPresent(d-> sj.add("targetRequestData=") @@ -59,4 +62,8 @@ public String toString() { return sj.toString(); }); } + + public UniqueReplayerRequestKey getRequestKey() { + return context.getLogicalEnclosingScope().getReplayerRequestKey(); + } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index a5d75c1ab..978a2afb9 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -16,9 +16,9 @@ import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.tracing.RootOtelContext; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; @@ -75,8 +75,6 @@ @Slf4j public class TrafficReplayer { - private static final MetricsLogger TUPLE_METRICS_LOGGER = new MetricsLogger("SourceTargetCaptureTuple"); - public static final String SIGV_4_AUTH_HEADER_SERVICE_REGION_ARG = "--sigv4-auth-header-service-region"; public static final String AUTH_HEADER_VALUE_ARG = "--auth-header-value"; public static final String REMOVE_AUTH_HEADER_VALUE_ARG = "--remove-auth-header"; @@ -405,7 +403,7 @@ public static void main(String[] args) params.allowInsecureConnections, params.numClientThreads, params.maxConcurrentRequests); setupShutdownHookForReplayer(tr); - var tupleWriter = new TupleParserChainConsumer(TUPLE_METRICS_LOGGER, new ResultsToLogsConsumer()); + var tupleWriter = new TupleParserChainConsumer(new ResultsToLogsConsumer()); var timeShifter = new TimeShifter(params.speedupFactor); tr.setupRunAndWaitForReplayWithShutdownChecks(Duration.ofSeconds(params.observedPacketConnectionTimeout), blockingTrafficSource, timeShifter, tupleWriter); @@ -603,13 +601,13 @@ class TrafficReplayerAccumulationCallbacks implements AccumulationCallbacks { private ITrafficCaptureSource trafficCaptureSource; @Override - public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onRequestReceived(IReplayContexts.@NonNull IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { replayEngine.setFirstTimestamp(request.getFirstPacketTimestamp()); liveTrafficStreamLimiter.addWork(1); - var requestPushFuture = transformAndSendRequest(replayEngine, request, requestKey, ctx); + var requestPushFuture = transformAndSendRequest(replayEngine, request, ctx); + var requestKey = ctx.getReplayerRequestKey(); requestFutureMap.put(requestKey, requestPushFuture); liveRequests.put(requestKey, true); requestPushFuture.map(f->f.whenComplete((v,t)->{ @@ -622,13 +620,13 @@ public void onRequestReceived(@NonNull UniqueReplayerRequestKey requestKey, } @Override - public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onFullDataReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrPair) { - log.atInfo().setMessage(()->"Done receiving captured stream for " + requestKey + + log.atInfo().setMessage(()->"Done receiving captured stream for " + ctx + ":" + rrPair.requestData).log(); + var requestKey = ctx.getReplayerRequestKey(); var resultantCf = requestFutureMap.remove(requestKey) - .map(f -> f.handle((summary,t)->handleCompletedTransaction(ctx, requestKey, rrPair, summary, t)), + .map(f -> f.handle((summary,t)->handleCompletedTransaction(ctx, rrPair, summary, t)), 
() -> "TrafficReplayer.runReplayWithIOStreams.progressTracker"); if (!resultantCf.future.isDone()) { log.trace("Adding " + requestKey + " to targetTransactionInProgressMap"); @@ -639,23 +637,22 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, } } - Void handleCompletedTransaction(IReplayContexts.IReplayerHttpTransactionContext context, - @NonNull UniqueReplayerRequestKey requestKey, + Void handleCompletedTransaction(@NonNull IReplayContexts.IReplayerHttpTransactionContext context, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Throwable t) { - var httpContext = rrPair.getHttpTransactionContext(); - try { + try (var httpContext = rrPair.getHttpTransactionContext()) { // if this comes in with a serious Throwable (not an Exception), don't bother // packaging it up and calling the callback. // Escalate it up out handling stack and shutdown. if (t == null || t instanceof Exception) { try (var tupleHandlingContext = httpContext.createTupleContext()) { - packageAndWriteResponse(resultTupleConsumer, requestKey, rrPair, summary, (Exception) t); + packageAndWriteResponse(tupleHandlingContext, resultTupleConsumer, + rrPair, summary, (Exception) t); } commitTrafficStreams(rrPair.completionStatus, rrPair.trafficStreamKeysBeingHeld); return null; } else { - log.atError().setCause(t).setMessage(()->"Throwable passed to handle() for " + requestKey + + log.atError().setCause(t).setMessage(() -> "Throwable passed to handle() for " + context + ". Rethrowing.").log(); throw Lombok.sneakyThrow(t); } @@ -671,12 +668,12 @@ Void handleCompletedTransaction(IReplayContexts.IReplayerHttpTransactionContext .setMessage("Unexpected exception while sending the " + "aggregated response and context for {} to the callback. 
" + "Proceeding, but the tuple receiver context may be compromised.") - .addArgument(requestKey) + .addArgument(context) .setCause(e) .log(); throw e; } finally { - httpContext.close(); + var requestKey = context.getReplayerRequestKey(); requestToFinalWorkFuturesMap.remove(requestKey); log.trace("removed rrPair.requestData to " + "targetTransactionInProgressMap for " + @@ -686,7 +683,7 @@ Void handleCompletedTransaction(IReplayContexts.IReplayerHttpTransactionContext @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.@NonNull IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { commitTrafficStreams(status, trafficStreamKeysBeingHeld); } @@ -710,44 +707,43 @@ private void commitTrafficStreams(boolean shouldCommit, } @Override - public void onConnectionClose(@NonNull ISourceTrafficChannelKey channelKey, - int channelInteractionNum, - IReplayContexts.IChannelKeyContext ctx, + public void onConnectionClose(int channelInteractionNum, + IReplayContexts.@NonNull IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant timestamp, @NonNull List trafficStreamKeysBeingHeld) { replayEngine.setFirstTimestamp(timestamp); - var cf = replayEngine.closeConnection(channelKey, channelInteractionNum, ctx, timestamp); + var cf = replayEngine.closeConnection(channelInteractionNum, ctx, timestamp); cf.map(f->f.whenComplete((v,t)->{ commitTrafficStreams(status, trafficStreamKeysBeingHeld); }), ()->"closing the channel in the ReplayEngine"); } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, IReplayContexts.IChannelKeyContext ctx) { - commitTrafficStreams(true, List.of(tsk)); + public void onTrafficStreamIgnored(@NonNull IReplayContexts.ITrafficStreamsLifecycleContext ctx) { + commitTrafficStreams(true, List.of(ctx.getTrafficStreamKey())); } private TransformedTargetRequestAndResponse - packageAndWriteResponse(Consumer tupleWriter, - @NonNull UniqueReplayerRequestKey requestKey, + packageAndWriteResponse(IReplayContexts.ITupleHandlingContext tupleHandlingContext, + Consumer tupleWriter, RequestResponsePacketPair rrPair, TransformedTargetRequestAndResponse summary, Exception t) { log.trace("done sending and finalizing data to the packet handler"); - try (var requestResponseTuple = getSourceTargetCaptureTuple(requestKey, rrPair, summary, t)) { + try (var requestResponseTuple = getSourceTargetCaptureTuple(tupleHandlingContext, rrPair, summary, t)) { log.atInfo().setMessage(()->"Source/Target Request/Response tuple: " + requestResponseTuple).log(); tupleWriter.accept(requestResponseTuple); } if (t != null) { throw new CompletionException(t); } if (summary.getError() != null) { - log.atInfo().setCause(summary.getError()).setMessage("Exception for request {}: ") - .addArgument(requestKey).log(); + log.atInfo().setCause(summary.getError()).setMessage("Exception for {}: ") + .addArgument(tupleHandlingContext).log(); exceptionRequestCount.incrementAndGet(); } else if (summary.getTransformationStatus() == HttpRequestTransformationStatus.ERROR) { - log.atInfo().setCause(summary.getError()).setMessage("Unknown error transforming the request {}: ") - .addArgument(requestKey).log(); + log.atInfo().setCause(summary.getError()).setMessage("Unknown error transforming {}: ") + .addArgument(tupleHandlingContext).log(); exceptionRequestCount.incrementAndGet(); } else { successfulRequestCount.incrementAndGet(); @@ -851,19 +847,19 @@ private 
static String formatWorkItem(DiagnosticTrackableCompletableFuture(), HttpRequestTransformationStatus.ERROR, t, Duration.ZERO); } else { - requestResponseTriple = new SourceTargetCaptureTuple(uniqueRequestKey, rrPair, + requestResponseTuple = new SourceTargetCaptureTuple(tupleHandlingContext, rrPair, summary.requestPackets, summary.getReceiptTimeAndResponsePackets() .map(Map.Entry::getValue).collect(Collectors.toList()), @@ -872,14 +868,14 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture transformAndSendRequest(ReplayEngine replayEngine, HttpMessageAndTimestamp request, - UniqueReplayerRequestKey requestKey, IReplayContexts.IReplayerHttpTransactionContext ctx) { + IReplayContexts.IReplayerHttpTransactionContext ctx) { return transformAndSendRequest(inputRequestTransformerFactory, replayEngine, ctx, - request.getFirstPacketTimestamp(), request.getLastPacketTimestamp(), requestKey, + request.getFirstPacketTimestamp(), request.getLastPacketTimestamp(), request.packetBytes::stream); } @@ -887,19 +883,18 @@ private static String formatWorkItem(DiagnosticTrackableCompletableFuture> packetsSupplier) { try { var transformationCompleteFuture = replayEngine.scheduleTransformationWork(ctx, start, ()-> - transformAllData(inputRequestTransformerFactory.create(requestKey, ctx), packetsSupplier)); - log.atDebug().setMessage(()->"finalizeRequest future for transformation of " + requestKey + + transformAllData(inputRequestTransformerFactory.create(ctx), packetsSupplier)); + log.atDebug().setMessage(()->"finalizeRequest future for transformation of " + ctx + " = " + transformationCompleteFuture).log(); // It might be safer to chain this work directly inside the scheduleWork call above so that the // read buffer horizons aren't set after the transformation work finishes, but after the packets // are fully handled return transformationCompleteFuture.thenCompose(transformedResult -> - replayEngine.scheduleRequest(requestKey, ctx, start, end, + replayEngine.scheduleRequest(ctx, start, end, transformedResult.transformedOutput.size(), transformedResult.transformedOutput.streamRetained()) .map(future->future.thenApply(t -> diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java index 365e5cf80..da7ebfbb8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java @@ -8,21 +8,16 @@ import java.util.function.Consumer; public class TupleParserChainConsumer implements Consumer { - private final MetricsLogger optionalMetricsLoggerToEmitStats; private final BiConsumer innerConsumer; - public TupleParserChainConsumer(MetricsLogger optionalMetricsLoggerToEmitStats, - @NonNull BiConsumer innerConsumer) { - this.optionalMetricsLoggerToEmitStats = optionalMetricsLoggerToEmitStats; this.innerConsumer = innerConsumer; } @Override public void accept(SourceTargetCaptureTuple tuple) { var parsedMsgs = new ParsedHttpMessagesAsDicts(tuple); - Optional.ofNullable(optionalMetricsLoggerToEmitStats) - .ifPresent(ml->parsedMsgs.buildStatusCodeMetrics(ml, tuple.uniqueRequestKey).emit()); innerConsumer.accept(tuple, parsedMsgs); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java
index d9e2b911e..bf27f5859 100644
--- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java
+++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java
@@ -9,6 +9,7 @@ import org.opensearch.migrations.tracing.IWithTypedEnclosingScope;
 
 import java.time.Instant;
+import java.util.Optional;
 
 public abstract class IReplayContexts {
 
@@ -27,7 +28,7 @@ private ActivityNames() {}
         public static final String REQUEST_SENDING = "requestSending";
         public static final String WAITING_FOR_RESPONSE = "waitingForResponse";
         public static final String RECEIVING_RESPONSE = "receivingResponse";
-        public static final String TUPLE_HANDLING = "tupleHandling";
+        public static final String TUPLE_HANDLING = "finalizingResults";
     }
 
     public static class MetricNames {
@@ -56,6 +57,7 @@ private MetricNames() {}
         public static final String ACTIVE_TARGET_CONNECTIONS = "activeTargetConnections";
         public static final String BYTES_WRITTEN_TO_TARGET = "bytesWrittenToTarget";
         public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget";
+        public static final String STATUS_MATCH = "statusMatch";
     }
 
     public interface IAccumulationScope extends IScopedInstrumentationAttributes {
@@ -256,6 +258,26 @@ public interface ITupleHandlingContext
             extends IAccumulationScope,
             IWithTypedEnclosingScope {
         String ACTIVITY_NAME = ActivityNames.TUPLE_HANDLING;
+        AttributeKey<Long> SOURCE_STATUS_CODE_KEY = AttributeKey.longKey("sourceStatusCode");
+        AttributeKey<Long> TARGET_STATUS_CODE_KEY = AttributeKey.longKey("targetStatusCode");
+        AttributeKey<Boolean> STATUS_CODE_MATCH_KEY = AttributeKey.booleanKey("statusCodesMatch");
+        AttributeKey<String> METHOD_KEY = AttributeKey.stringKey("method");
+        AttributeKey<String> HTTP_VERSION_KEY = AttributeKey.stringKey("version"); // for the span, not metric
+        AttributeKey<String> ENDPOINT_KEY = AttributeKey.stringKey("endpoint"); // for the span, not metric
+
         @Override default String getActivityName() { return ACTIVITY_NAME; }
+
+        void setSourceStatus(Integer sourceStatus);
+        void setTargetStatus(Integer targetStatus);
+
+        void setMethod(String method);
+
+        void setEndpoint(String endpointUrl);
+
+        void setHttpVersions(String string);
+
+        default UniqueReplayerRequestKey getReplayerRequestKey() {
+            return getLogicalEnclosingScope().getReplayerRequestKey();
+        }
     }
 }
diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java
index dd77b38a7..110e0af43 100644
--- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java
+++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java
@@ -1,11 +1,14 @@
 package org.opensearch.migrations.replay.tracing;
 
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.common.AttributesBuilder;
 import io.opentelemetry.api.metrics.DoubleHistogram;
 import io.opentelemetry.api.metrics.LongCounter;
 import io.opentelemetry.api.metrics.LongUpDownCounter;
 import io.opentelemetry.api.metrics.Meter;
 import lombok.Getter;
 import lombok.NonNull;
+import lombok.Setter;
 import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey;
 import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey;
 import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey;
@@ -16,6 +19,8 @@
 import java.time.Duration;
 import java.time.Instant;
+import java.util.Optional;
+import java.util.stream.Stream;
 
 public abstract class ReplayContexts extends IReplayContexts {
@@ -586,17 +591,39 @@ private MetricInstruments(Meter meter, String activityName) {
         }
 
+    @Getter
+    @Setter
     public static class TupleHandlingContext extends DirectNestedSpanContext
             implements IReplayContexts.ITupleHandlingContext {
+        Integer sourceStatus;
+        Integer targetStatus;
+        String method;
+        String httpVersion;
+
         public TupleHandlingContext(HttpTransactionContext enclosingScope) {
             super(enclosingScope);
             initializeSpan();
         }
+        @Override
+        public void close() {
+            super.close();
+        }
+
         public static class MetricInstruments extends CommonScopedMetricInstruments {
+            //private final LongCounter statusMatchCounter;
+            private final LongCounter resultCounter;
+//            private final LongCounter sourceStatus;
+//            private final LongCounter targetStatus;
+//            private final LongCounter methodCounter;
             private MetricInstruments(Meter meter, String activityName) {
                 super(meter, activityName);
+                //statusMatchCounter = meter.counterBuilder(MetricNames.STATUS_MATCH).build();
+//                sourceStatus = meter.counterBuilder("sourceStatus").build();
+//                targetStatus = meter.counterBuilder("targetStatus").build();
+//                methodCounter = meter.counterBuilder("method").build();
+                resultCounter = meter.counterBuilder("tupleResult").build();
             }
         }
@@ -608,5 +635,46 @@ private MetricInstruments(Meter meter, String activityName) {
             return getRootInstrumentationScope().tupleHandlingInstruments;
         }
 
+        @Override
+        public void sendMeterEventsForEnd() {
+            super.sendMeterEventsForEnd();
+            final var sourceOp = Optional.ofNullable(sourceStatus);
+            final var targetOp = Optional.ofNullable(targetStatus);
+            final boolean didMatch = sourceOp.flatMap(ss -> targetOp.map(ss::equals)).orElse(false);
+            AttributesBuilder attributesBuilderForAggregate =
+                    addAttributeIfPresent(addAttributeIfPresent(addAttributeIfPresent(
+                            Attributes.builder(),
+                            METHOD_KEY, Optional.ofNullable(method)),
+                            SOURCE_STATUS_CODE_KEY, sourceOp.map(TupleHandlingContext::categorizeStatus)),
+                            TARGET_STATUS_CODE_KEY, targetOp.map(TupleHandlingContext::categorizeStatus))
+                            .put(STATUS_CODE_MATCH_KEY, didMatch);
+
+            getCurrentSpan().setAllAttributes(attributesBuilderForAggregate.build());
+            meterIncrementEvent(getMetrics().resultCounter, 1, attributesBuilderForAggregate);
+        }
+
+        /**
+         * Convert everything in the 2xx range to 200; 300-399 to 300
+         * @param status the HTTP status code to bucket
+         * @return the status code rounded down to its hundreds bucket (e.g. 404 becomes 400)
+         */
+        public static long categorizeStatus(int status) {
+            return (status / 100L) * 100L;
+        }
+
+        @Override
+        public void setEndpoint(String endpointUrl) {
+            getCurrentSpan().setAttribute(ENDPOINT_KEY, endpointUrl);
+        }
+
+        @Override
+        public void setHttpVersions(String httpVersion) {
+            getCurrentSpan().setAttribute(HTTP_VERSION_KEY, httpVersion);
+        }
+
+        @Override
+        public String toString() {
+            return getReplayerRequestKey().toString();
+        }
     }
 }
diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java
index 9676a1aad..4ea891d70 100644
--- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java
+++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -9,6 +9,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; @@ -100,8 +101,7 @@ public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) thro try (var blockingTrafficSource = new BlockingTrafficSource(trafficSource, Duration.ofMinutes(2))) { tr.setupRunAndWaitForReplayWithShutdownChecks(Duration.ofSeconds(70), blockingTrafficSource, new TimeShifter(10 * 1000), (t) -> { - var key = t.uniqueRequestKey; - var wasNew = tuplesReceived.add(key.toString()); + var wasNew = tuplesReceived.add(t.getRequestKey().toString()); Assertions.assertTrue(wasNew); }); } finally { @@ -134,7 +134,7 @@ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSp chk.accept(numRequests, "targetTransaction"); chk.accept(numRequests*2, "scheduled"); chk.accept(numRequests, "requestSending"); - chk.accept(numRequests, "tupleHandling"); + chk.accept(numRequests, "finalizingResults"); Consumer chkNonZero = k-> { Assertions.assertNotNull(byName.get(k)); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index 7b55030ce..6aea2cb7e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -71,7 +71,7 @@ public Consumer get() { log.info("StopAt="+nextStopPointRef.get()); var stopPoint = nextStopPointRef.get(); return tuple -> { - var key = tuple.uniqueRequestKey; + var key = tuple.getRequestKey(); if (((TrafficStreamCursorKey) (key.getTrafficStreamKey())).arrayIndex > stopPoint) { log.error("Request received after our ingest threshold. Throwing. 
Discarding " + key); var nextStopPoint = stopPoint + new Random(stopPoint).nextInt(stopPoint + 1); @@ -123,10 +123,7 @@ public TrafficStreamCursorKey(TestContext context, TrafficStream stream, int arr nodeId = stream.getNodeId(); trafficStreamIndex = TrafficStreamUtils.getTrafficStreamIndex(stream); this.arrayIndex = arrayIndex; - var key = PojoTrafficStreamKeyAndContext.build(nodeId, connectionId, trafficStreamIndex, - context::createTrafficStreamContextForTest); - trafficStreamsContext = key.getTrafficStreamsContext(); - key.setTrafficStreamsContext(trafficStreamsContext); + trafficStreamsContext = context.createTrafficStreamContextForTest(this); } @Override diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java index a6955fdf8..3dfb612f1 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/HeaderTransformerTest.java @@ -8,7 +8,6 @@ import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.StaticAuthTransformerFactory; import java.time.Duration; @@ -36,7 +35,7 @@ public void testTransformer() throws Exception { var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), dummyAggregatedResponse); var transformer = new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME); var transformingHandler = new HttpJsonTransformingConsumer(transformer, null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + "HoSt: " + SOURCE_CLUSTER_NAME + "\r\n" + @@ -88,7 +87,7 @@ public void testMalformedPayloadIsPassedThrough() throws Exception { var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME), httpBasicAuthTransformer, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); runRandomPayloadWithTransformer(transformingHandler, dummyAggregatedResponse, testPacketCapture, contentLength -> "GET / HTTP/1.1\r\n" + @@ -114,7 +113,7 @@ public void testMalformedPayload_andTypeMappingUri_IsPassedThrough() throws Exce var transformingHandler = new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader(SILLY_TARGET_CLUSTER_NAME, null, "[{\"JsonTransformerForOpenSearch23PlusTargetTransformerProvider\":\"\"}]"), - null, testPacketCapture, TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + null, testPacketCapture, rootContext.getTestConnectionRequestContext(0)); Random r = new Random(2); var stringParts = IntStream.range(0, 1).mapToObj(i-> TestUtils.makeRandomString(r, 10)).map(o->(String)o) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index d85044fea..148f15e39 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -10,7 +10,9 @@ import org.apache.kafka.clients.producer.ProducerConfig; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.kafka.KafkaTestUtils; import org.opensearch.migrations.replay.kafka.KafkaTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; @@ -67,7 +69,7 @@ public Consumer get() { return tuple -> { if (counter.incrementAndGet() > stopPoint) { log.warn("Request received after our ingest threshold. Throwing. Discarding " + - tuple.uniqueRequestKey); + tuple.context); var nextStopPoint = stopPoint + new Random(stopPoint).nextInt(stopPoint + 1); nextStopPointRef.compareAndSet(stopPoint, nextStopPoint); throw new TrafficReplayerRunner.FabricatedErrorToKillTheReplayer(false); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index 89688a605..d3a89abf3 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -1,8 +1,8 @@ package org.opensearch.migrations.replay; +import lombok.NonNull; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.datatypes.MockMetricsBuilder; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import org.opensearch.migrations.tracing.InstrumentationTest; @@ -17,68 +17,16 @@ ParsedHttpMessagesAsDicts makeTestData() { return makeTestData(null, null); } - ParsedHttpMessagesAsDicts makeTestData(Map sourceResponse, Map targetResponse) { - return new ParsedHttpMessagesAsDicts( - Optional.empty(), Optional.ofNullable(sourceResponse), - Optional.empty(), Optional.ofNullable(targetResponse)); - } - - String getLoggedMetrics(ParsedHttpMessagesAsDicts parsedMessage) { - var metricsBuilder = new MockMetricsBuilder(); - var TEST_TRAFFIC_STREAM_KEY = - PojoTrafficStreamKeyAndContext.build("N","C",1, - k->rootContext.createTrafficStreamContextForStreamSource(rootContext.createChannelContext(k), k)); - metricsBuilder = (MockMetricsBuilder) parsedMessage.buildStatusCodeMetrics(metricsBuilder, - new PojoUniqueSourceRequestKey(TEST_TRAFFIC_STREAM_KEY, 0)); - return metricsBuilder.getLoggedAttributes(); - } - - @Test - public void testMetricsAreRightWhenBothAreEmpty() { - var loggedMetrics = getLoggedMetrics(makeTestData()); - Assertions.assertEquals("REQUEST_ID:C.0|HTTP_STATUS_MATCH:0", loggedMetrics); - } - - @Test - public void testMetricsAreRightWhenSourceIsEmpty() { - var loggedMetrics = getLoggedMetrics(makeTestData( - null, - 
Map.of("Status-Code", Integer.valueOf(200)) - )); - Assertions.assertEquals("REQUEST_ID:C.0|TARGET_HTTP_STATUS:200|HTTP_STATUS_MATCH:0", loggedMetrics); + @Override + protected TestContext makeContext() { + return TestContext.withTracking(false, true); } - @Test - public void testMetricsAreRightWhenTargetIsEmpty() { - var loggedMetrics = getLoggedMetrics(makeTestData( - Map.of("Status-Code", Integer.valueOf(200)), - null - )); - Assertions.assertEquals("REQUEST_ID:C.0|SOURCE_HTTP_STATUS:200|HTTP_STATUS_MATCH:0", loggedMetrics); - } - - @Test - public void testMetricsAreRightWhenDifferent() { - var loggedMetrics = getLoggedMetrics(makeTestData( - Map.of("Status-Code", Integer.valueOf(200)), - Map.of("Status-Code", Integer.valueOf(200)) - )); - Assertions.assertEquals("REQUEST_ID:C.0|SOURCE_HTTP_STATUS:200|TARGET_HTTP_STATUS:200|HTTP_STATUS_MATCH:1", loggedMetrics); - } - - @Test - public void testMetricsAreRightWhenMissing() { - var loggedMetrics = getLoggedMetrics(makeTestData( - Map.of("Status-Code", Integer.valueOf(200)), - Map.of("Status-Code", Integer.valueOf(404)))); - Assertions.assertEquals("REQUEST_ID:C.0|SOURCE_HTTP_STATUS:200|TARGET_HTTP_STATUS:404|HTTP_STATUS_MATCH:0", loggedMetrics); + ParsedHttpMessagesAsDicts makeTestData(Map sourceResponse, + Map targetResponse) { + return new ParsedHttpMessagesAsDicts(rootContext.getTestTupleContext(), + Optional.empty(), Optional.ofNullable(sourceResponse), + Optional.empty(), Optional.ofNullable(targetResponse)); } - @Test - public void testMetricsAreRightWithMissingStatusCode() { - var loggedMetrics = getLoggedMetrics(makeTestData( - Map.of("Sorry", "exception message..."), - Map.of("Status-Code", Integer.valueOf(404)))); - Assertions.assertEquals("REQUEST_ID:C.0|TARGET_HTTP_STATUS:404|HTTP_STATUS_MATCH:0", loggedMetrics); - } } \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java index 5b223d522..7f01c7d56 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/RequestSenderOrchestratorTest.java @@ -12,7 +12,6 @@ import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; import java.time.Duration; @@ -41,7 +40,7 @@ public void testThatSchedulingWorks() throws Exception { Instant lastEndTime = baseTime; var scheduledItems = new ArrayList>(); for (int i = 0; i logEvents = new ArrayList<>(); AbstractAppender testAppender; @@ -78,12 +83,13 @@ public void testTupleNewWithNullKeyThrows() { @Test public void testOutputterWithNulls() throws IOException { - var emptyTuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID, "c", 0, - rootContext::createTrafficStreamContextForTest), 0, 0), + + var urk = new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID, "c", 0, + rootContext::createTrafficStreamContextForTest), 0, 0); + var emptyTuple = new SourceTargetCaptureTuple(rootContext.getTestTupleContext(), null, null, null, null, null, null); try (var closeableLogSetup = new 
CloseableLogSetup()) { - var consumer = new TupleParserChainConsumer(null, new ResultsToLogsConsumer()); + var consumer = new TupleParserChainConsumer(new ResultsToLogsConsumer()); consumer.accept(emptyTuple); Assertions.assertEquals(1, closeableLogSetup.logEvents.size()); var contents = closeableLogSetup.logEvents.get(0); @@ -95,13 +101,11 @@ public void testOutputterWithNulls() throws IOException { @Test public void testOutputterWithException() throws IOException { var exception = new Exception(TEST_EXCEPTION_MESSAGE); - var emptyTuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(PojoTrafficStreamKeyAndContext.build(NODE_ID, "c", 0, - rootContext::createTrafficStreamContextForTest), 0, 0), + var emptyTuple = new SourceTargetCaptureTuple(rootContext.getTestTupleContext(), null, null, null, null, exception, null); try (var closeableLogSetup = new CloseableLogSetup()) { - var consumer = new TupleParserChainConsumer(null, new ResultsToLogsConsumer()); + var consumer = new TupleParserChainConsumer(new ResultsToLogsConsumer()); consumer.accept(emptyTuple); Assertions.assertEquals(1, closeableLogSetup.logEvents.size()); var contents = closeableLogSetup.logEvents.get(0); @@ -170,7 +174,7 @@ public void testOutputterForGet() throws IOException { " \"Funtime\": \"checkIt!\",\n" + " \"content-length\": \"30\"\n" + " },\n" + - " \"connectionId\": \"c.0\"\n" + + " \"connectionId\": \"testConnection.1\"\n" + "}"; testOutputterForRequest("get_withAuthHeader.txt", EXPECTED_LOGGED_OUTPUT); } @@ -225,7 +229,7 @@ public void testOutputterForPost() throws IOException { " \"Funtime\": \"checkIt!\",\n" + " \"content-length\": \"30\"\n" + " },\n" + - " \"connectionId\": \"c.0\"\n" + + " \"connectionId\": \"testConnection.1\"\n" + "}"; testOutputterForRequest("post_formUrlEncoded_withFixedLength.txt", EXPECTED_LOGGED_OUTPUT); } @@ -245,31 +249,31 @@ public void testOutputterForRequest(String requestResourceName, String expected) var targetResponse = new ArrayList(); targetResponse.add(rawResponseData); - var tuple = new SourceTargetCaptureTuple( - new UniqueReplayerRequestKey(trafficStreamKey, 0, 0), - sourcePair, targetRequest, targetResponse, HttpRequestTransformationStatus.SKIPPED, null, Duration.ofMillis(267)); - var streamConsumer = new ResultsToLogsConsumer(); - // we don't have an interface on MetricsLogger yet, so it's a challenge to test that directly. - // Assuming that it's going to use Slf/Log4J are really brittle. 
I'd rather miss a couple lines that - // should be getting tested elsewhere and be immune to those changes down the line - BiConsumer metricsHardWayCheckConsumer = (t,p) -> { - var metricsBuilder = new MockMetricsBuilder(); - metricsBuilder = (MockMetricsBuilder) p.buildStatusCodeMetrics(metricsBuilder, - new PojoUniqueSourceRequestKey(trafficStreamKey, 0)); - Assertions.assertEquals("REQUEST_ID:c.0|SOURCE_HTTP_STATUS:200|TARGET_HTTP_STATUS:200|HTTP_STATUS_MATCH:1", - metricsBuilder.getLoggedAttributes()); - }; - try (var closeableLogSetup = new CloseableLogSetup()) { - var consumer = new TupleParserChainConsumer(null, (a,b)->{ - streamConsumer.accept(a,b); - metricsHardWayCheckConsumer.accept(a,b); - }); + try (var tupleContext = rootContext.getTestTupleContext(); + var closeableLogSetup = new CloseableLogSetup()) { + var tuple = new SourceTargetCaptureTuple(tupleContext, + sourcePair, targetRequest, targetResponse, HttpRequestTransformationStatus.SKIPPED, null, Duration.ofMillis(267)); + var streamConsumer = new ResultsToLogsConsumer(); + var consumer = new TupleParserChainConsumer(streamConsumer); consumer.accept(tuple); Assertions.assertEquals(1, closeableLogSetup.logEvents.size()); var contents = closeableLogSetup.logEvents.get(0); - log.info("Output="+contents); + log.info("Output=" + contents); Assertions.assertEquals(normalizeJson(expected), normalizeJson(contents)); } + try { + Thread.sleep(500); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + var allMetricData = rootContext.inMemoryInstrumentationBundle.testMetricExporter.getFinishedMetricItems(); + var filteredMetrics = allMetricData.stream().filter(md->md.getName().startsWith("tupleResult")) + .collect(Collectors.toList()); + // TODO - find out how to verify these metrics + log.error("TODO - find out how to verify these metrics"); +// Assertions.assertEquals("REQUEST_ID:testConnection.1|SOURCE_HTTP_STATUS:200|TARGET_HTTP_STATUS:200|HTTP_STATUS_MATCH:1", +// filteredMetrics.stream().map(md->md.getName()+":"+md.getData()).collect(Collectors.joining("|"))); + } static String normalizeJson(String input) throws JsonProcessingException { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 46d935059..20b9658a6 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -224,17 +224,15 @@ void generateAndTest(String testName, int bufferSize, int skipCount, new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, new AccumulationCallbacks() { @Override - public void onRequestReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onRequestReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { requestsReceived.incrementAndGet(); } @Override - public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onFullDataReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair 
fullPair) { - var sourceIdx = requestKey.getSourceRequestIndex(); + var sourceIdx = ctx.getReplayerRequestKey().getSourceRequestIndex(); if (fullPair.completionStatus == RequestResponsePacketPair.ReconstructionStatus.CLOSED_PREMATURELY) { return; @@ -253,20 +251,19 @@ public void onFullDataReceived(@NonNull UniqueReplayerRequestKey requestKey, @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.@NonNull IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) {} @Override - public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IReplayContexts.IChannelKeyContext ctx, + public void onConnectionClose(int channelInteractionNumber, + @NonNull IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } - @Override public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx) { - tsIndicesReceived.add(tsk.getTrafficStreamIndex()); + @Override public void onTrafficStreamIgnored(@NonNull IReplayContexts.ITrafficStreamsLifecycleContext ctx) { + tsIndicesReceived.add(ctx.getTrafficStreamKey().getTrafficStreamIndex()); } }); var tsList = trafficStreams.collect(Collectors.toList()); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index 3add84204..a5697b8d8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -5,6 +5,7 @@ import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.tracing.TestContext; @@ -58,7 +59,7 @@ static void runReplayerUntilSourceWasExhausted(TestContext rootContext, int numE return; } Assertions.assertEquals(runNumber, runNumberRef.get()); - var key = t.uniqueRequestKey; + var key = t.getRequestKey(); ISourceTrafficChannelKey tsk = key.getTrafficStreamKey(); var keyString = tsk.getConnectionId() + "_" + key.getSourceRequestIndex(); var prevKeyString = tsk.getConnectionId() + "_" + (key.getSourceRequestIndex()-1); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index a543c80b0..9472fc187 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -159,8 +159,7 @@ public void testReader() throws Exception { new CapturedTrafficToHttpTransactionAccumulator(Duration.ofSeconds(30), null, new AccumulationCallbacks() { @Override - public void onRequestReceived(@NonNull UniqueReplayerRequestKey id, - 
IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onRequestReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); @@ -168,30 +167,29 @@ public void onRequestReceived(@NonNull UniqueReplayerRequestKey id, } @Override - public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onFullDataReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair fullPair) { - var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); + var responseBytes = new ArrayList(fullPair.responseData.packetBytes); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); } @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + IReplayContexts.@NonNull IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { } @Override - public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IReplayContexts.IChannelKeyContext ctx, + public void onConnectionClose(int channelInteractionNumber, + IReplayContexts.@NonNull IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx) { + public void onTrafficStreamIgnored(@NonNull IReplayContexts.ITrafficStreamsLifecycleContext ctx) { + } }); var bytes = synthesizeTrafficStreamsIntoByteArray(Instant.now(), 1); @@ -217,8 +215,7 @@ public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { "CapturedTrafficToHttpTransactionAccumulator that's being used in this unit test!", new AccumulationCallbacks() { @Override - public void onRequestReceived(@NonNull UniqueReplayerRequestKey id, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onRequestReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request) { var bytesList = request.stream().collect(Collectors.toList()); byteArrays.add(bytesList); @@ -226,29 +223,28 @@ public void onRequestReceived(@NonNull UniqueReplayerRequestKey id, } @Override - public void onFullDataReceived(@NonNull UniqueReplayerRequestKey key, - IReplayContexts.IReplayerHttpTransactionContext ctx, + public void onFullDataReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair fullPair) { - var responseBytes = fullPair.responseData.packetBytes.stream().collect(Collectors.toList()); + var responseBytes = new ArrayList(fullPair.responseData.packetBytes); Assertions.assertEquals(FAKE_READ_PACKET_DATA, collectBytesToUtf8String(responseBytes)); } @Override public void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, - IReplayContexts.IChannelKeyContext ctx, + @NonNull IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld) { } @Override - public void onConnectionClose(@NonNull ISourceTrafficChannelKey key, int channelInteractionNumber, - IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, + public void onConnectionClose(int channelInteractionNumber, + 
@NonNull IReplayContexts.IChannelKeyContext ctx, + RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld) { } @Override - public void onTrafficStreamIgnored(@NonNull ITrafficStreamKey tsk, - IReplayContexts.IChannelKeyContext ctx) { + public void onTrafficStreamIgnored(@NonNull IReplayContexts.ITrafficStreamsLifecycleContext ctx) { } } ); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index 265562f57..3f9118b12 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -16,7 +16,6 @@ import org.opensearch.migrations.replay.PacketToTransformingHttpHandlerFactory; import org.opensearch.migrations.replay.ReplayEngine; import org.opensearch.migrations.replay.RequestSenderOrchestrator; -import org.opensearch.migrations.replay.TestRequestKey; import org.opensearch.migrations.replay.TimeShifter; import org.opensearch.migrations.replay.TrafficReplayer; import org.opensearch.migrations.replay.TransformationLoader; @@ -28,7 +27,6 @@ import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import javax.net.ssl.SSLException; import java.io.IOException; @@ -132,7 +130,7 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except new NioEventLoopGroup(4, new DefaultThreadFactory("test")), testServer.localhostEndpoint(), sslContext, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); nphc.consumeBytes((EXPECTED_REQUEST_STRING).getBytes(StandardCharsets.UTF_8)); var aggregatedResponse = nphc.finalizeRequest().get(); var responseBytePackets = aggregatedResponse.getCopyOfPackets(); @@ -161,9 +159,9 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) new TestFlowController(), timeShifter); for (int j = 0; j < 2; ++j) { for (int i = 0; i < 2; ++i) { - var ctx = TestRequestKey.getTestConnectionRequestContext(rootContext, "TEST_" + i, j); + var ctx = rootContext.getTestConnectionRequestContext("TEST_" + i, j); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, - sendingFactory, ctx, Instant.now(), Instant.now(), ctx.getReplayerRequestKey(), + sendingFactory, ctx, Instant.now(), Instant.now(), () -> Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); log.info("requestFinishFuture=" + requestFinishFuture); var aggregatedResponse = requestFinishFuture.get(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java index b6157526c..f3ee45482 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java +++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java @@ -4,12 +4,10 @@ import org.junit.jupiter.api.Test; import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.TestCapturePacketToHttpHandler; -import org.opensearch.migrations.replay.TestRequestKey; import org.opensearch.migrations.replay.TransformationLoader; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.transform.IJsonTransformer; import org.opensearch.migrations.transform.JsonCompositeTransformer; import org.opensearch.migrations.transform.RemovingAuthTransformerFactory; @@ -30,7 +28,7 @@ public void testPassThroughSinglePacketPost() throws Exception { new HttpJsonTransformingConsumer(new TransformationLoader() .getTransformerFactoryLoader(null), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -51,7 +49,7 @@ public void testPassThroughSinglePacketWithoutBodyTransformationPost() throws Ex new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), null, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { @@ -76,7 +74,7 @@ public void testRemoveAuthHeadersWorks() throws Exception { new HttpJsonTransformingConsumer( new TransformationLoader().getTransformerFactoryLoader("test.domain"), RemovingAuthTransformerFactory.instance, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + rootContext.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/get_withAuthHeader.txt")) { @@ -115,7 +113,7 @@ private void walkMaps(Object o) { }); var transformingHandler = new HttpJsonTransformingConsumer(complexTransformer, null, - testPacketCapture, TestRequestKey.getTestConnectionRequestContext(rootContext, 0)); + testPacketCapture, rootContext.getTestConnectionRequestContext(0)); byte[] testBytes; try (var sampleStream = HttpJsonTransformingConsumer.class.getResourceAsStream( "/requests/raw/post_formUrlEncoded_withFixedLength.txt")) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datatypes/MockMetricsBuilder.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datatypes/MockMetricsBuilder.java deleted file mode 100644 index e1d2c2ed1..000000000 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datatypes/MockMetricsBuilder.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.opensearch.migrations.replay.datatypes; - -import org.opensearch.migrations.coreutils.MetricsAttributeKey; -import org.opensearch.migrations.coreutils.MetricsLogBuilder; - -import java.util.StringJoiner; - -public class MockMetricsBuilder 
extends MetricsLogBuilder { - StringJoiner attributeLogger = new StringJoiner("|"); - - public MockMetricsBuilder() { - super(null); - } - - @Override - public MetricsLogBuilder setAttribute(MetricsAttributeKey key, Object value) { - attributeLogger.add(key + ":" + value); - return this; - } - - public String getLoggedAttributes() { - return attributeLogger.toString(); - } -} diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java deleted file mode 100644 index 59a271d90..000000000 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestRequestKey.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.opensearch.migrations.replay; - -import java.time.Instant; - -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.ReplayContexts; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.TestContext; - -public class TestRequestKey { - - public static final String TEST_NODE_ID = "testNodeId"; - public static final String DEFAULT_TEST_CONNECTION = "testConnection"; - - private TestRequestKey() {} - - public static final IReplayContexts.IReplayerHttpTransactionContext - getTestConnectionRequestContext(TestContext ctx, int replayerIdx) { - return getTestConnectionRequestContext(ctx, DEFAULT_TEST_CONNECTION, replayerIdx); - } - - public static IReplayContexts.IReplayerHttpTransactionContext - getTestConnectionRequestContext(TestContext ctx, String connectionId, int replayerIdx) { - var rk = new UniqueReplayerRequestKey( - PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, - ctx::createTrafficStreamContextForTest), - 0, replayerIdx); - return rk.trafficStreamKey.getTrafficStreamsContext().createHttpTransactionContext(rk, Instant.EPOCH); - } -} diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java index 39ccee061..d883e7d94 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestUtils.java @@ -145,7 +145,7 @@ static void runPipelineAndValidate(TestContext rootContext, var testPacketCapture = new TestCapturePacketToHttpHandler(Duration.ofMillis(100), new AggregatedRawResponse(-1, Duration.ZERO, new ArrayList<>(), null)); var transformingHandler = new HttpJsonTransformingConsumer<>(transformer, authTransformer, testPacketCapture, - TestRequestKey.getTestConnectionRequestContext(rootContext, "TEST_CONNECTION", 0)); + rootContext.getTestConnectionRequestContext("TEST_CONNECTION", 0)); var contentLength = stringParts.stream().mapToInt(String::length).sum(); var headerString = "GET / HTTP/1.1\r\n" + diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index efa994f0d..1b8a59e1b 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ 
b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -2,16 +2,19 @@ import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; -import org.junit.jupiter.api.Assertions; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; +import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import java.util.stream.Collectors; +import java.time.Instant; public class TestContext extends RootReplayerContext implements AutoCloseable { + public static final String TEST_NODE_ID = "testNodeId"; + public static final String DEFAULT_TEST_CONNECTION = "testConnection"; public final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; public final ContextTracker contextTracker = new ContextTracker(); public final ChannelContextManager channelContextManager = new ChannelContextManager(this); @@ -29,6 +32,11 @@ public static TestContext noOtelTracking() { return new TestContext(new InMemoryInstrumentationBundle(null, null)); } + public TestContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) { + super(inMemoryInstrumentationBundle.openTelemetrySdk); + this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; + } + @Override public void onContextCreated(IScopedInstrumentationAttributes newScopedContext) { contextTracker.onCreated(newScopedContext); @@ -39,11 +47,6 @@ public void onContextClosed(IScopedInstrumentationAttributes newScopedContext) { contextTracker.onClosed(newScopedContext); } - public TestContext(InMemoryInstrumentationBundle inMemoryInstrumentationBundle) { - super(inMemoryInstrumentationBundle.openTelemetrySdk); - this.inMemoryInstrumentationBundle = inMemoryInstrumentationBundle; - } - public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficStreamContextForTest(ITrafficStreamKey tsk) { return createTrafficStreamContextForStreamSource(channelContextManager.retainOrCreateContext(tsk), tsk); } @@ -53,4 +56,28 @@ public void close() { // Assertions.assertEquals("", contextTracker.getAllRemainingActiveScopes().entrySet().stream() // .map(kvp->kvp.getKey().toString()).collect(Collectors.joining())); } + + + public final IReplayContexts.IReplayerHttpTransactionContext + getTestConnectionRequestContext(int replayerIdx) { + return getTestConnectionRequestContext(DEFAULT_TEST_CONNECTION, replayerIdx); + } + + public IReplayContexts.IReplayerHttpTransactionContext + getTestConnectionRequestContext(String connectionId, int replayerIdx) { + var rk = new UniqueReplayerRequestKey( + PojoTrafficStreamKeyAndContext.build(TEST_NODE_ID, connectionId, 0, + this::createTrafficStreamContextForTest), + 0, replayerIdx); + return rk.trafficStreamKey.getTrafficStreamsContext().createHttpTransactionContext(rk, Instant.EPOCH); + } + public IReplayContexts.ITupleHandlingContext + getTestTupleContext() { + return getTestTupleContext(DEFAULT_TEST_CONNECTION, 1); + } + + public IReplayContexts.ITupleHandlingContext + getTestTupleContext(String connectionId, int replayerIdx) { + return getTestConnectionRequestContext(connectionId, replayerIdx).createTupleContext(); + } } From ffcc09b9f92c39e25dedb12cb6040caa3998dd41 Mon Sep 17 00:00:00 
2001 From: Greg Schohn Date: Mon, 22 Jan 2024 00:14:11 -0500 Subject: [PATCH 68/94] Minor cleanup on exception tracking Signed-off-by: Greg Schohn --- .../migrations/tracing/BaseNestedSpanContext.java | 10 ++++++++-- .../migrations/tracing/IInstrumentationAttributes.java | 6 +----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index cbe9e5a29..620640eae 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -19,8 +19,8 @@ public abstract class BaseNestedSpanContext @Getter final Instant startTime; @Getter private Span currentSpan; @Getter private final S rootInstrumentationScope; - @Getter @Setter - Exception observedExceptionToIncludeInMetrics; + @Getter + Throwable observedExceptionToIncludeInMetrics; protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder attributesBuilder, AttributeKey key, Optional value) { @@ -64,4 +64,10 @@ public void initializeSpan(@NonNull Span s) { assert currentSpan == null : "only expect to set the current span once"; currentSpan = s; } + + @Override + public void addException(Throwable e) { + IScopedInstrumentationAttributes.super.addException(e); + observedExceptionToIncludeInMetrics = e; + } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 220bfe3ae..4fa589508 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -9,12 +9,9 @@ import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; import lombok.NonNull; -import org.opensearch.migrations.coreutils.MetricsAttributeKey; -import org.opensearch.migrations.coreutils.MetricsLogBuilder; import java.time.Duration; import java.util.ArrayList; -import java.util.Optional; public interface IInstrumentationAttributes { AttributeKey HAD_EXCEPTION_KEY = AttributeKey.booleanKey("hadException"); @@ -27,8 +24,7 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { return builder; } - Exception getObservedExceptionToIncludeInMetrics(); - void setObservedExceptionToIncludeInMetrics(Exception e); + Throwable getObservedExceptionToIncludeInMetrics(); default @NonNull Attributes getPopulatedMetricAttributes(AttributesBuilder attributesBuilder) { final var e = getObservedExceptionToIncludeInMetrics(); From dd893367aba338d643254084ce3b5e36a2c24e49 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 22 Jan 2024 14:18:04 -0500 Subject: [PATCH 69/94] Bugfix - a class was inheriting from the Connection context's MetricInstruments class when it should not have been Signed-off-by: Greg Schohn --- .../org/opensearch/migrations/tracing/RootOtelContext.java | 1 - .../trafficcapture/netty/tracing/WireCaptureContexts.java | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index a4b1f03f7..81ffa036c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -27,7 +27,6 @@ import java.time.Duration; import java.util.Optional; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import java.util.stream.Stream; public class RootOtelContext implements IRootOtelContext { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java index 58da58a23..30d447077 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -127,8 +127,7 @@ public IWireCaptureContexts.IWaitingForResponseContext createWaitingForResponseC sourceRequestIndex); } - public static class MetricInstruments - extends org.opensearch.migrations.trafficcapture.tracing.ConnectionContext.MetricInstruments { + public static class MetricInstruments extends CommonScopedMetricInstruments { public final LongCounter blockingRequestCounter; public final LongCounter requestsNotOffloadedCounter; public final LongCounter fullyParsedRequestCounter; From a15d08a395a3390480b7acfc0ae48edbe9c5b654 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 22 Jan 2024 15:36:38 -0500 Subject: [PATCH 70/94] Cleanup build.gradle files' open-telemetry dependencies. Embrace otel as an api dependency for coreUtilities, since that's a tight coupling. 
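Context for that api-vs-implementation change, as an editorial sketch rather than part of the patch itself: under Gradle's java-library plugin, dependencies declared with "api" are exported onto consumers' compile classpaths, while "implementation" dependencies stay internal to the declaring module. Since coreUtilities' tracing interfaces expose OpenTelemetry types (AttributeKey, Span, Meter) in their signatures, downstream modules otherwise have to redeclare the OpenTelemetry artifacts themselves. A minimal illustration of the distinction, reusing coordinates that appear in the coreUtilities build.gradle diff below:

    dependencies {
        // Exported: projects that depend on coreUtilities can compile against
        // OpenTelemetry API types transitively.
        api group: 'io.opentelemetry', name: 'opentelemetry-api'

        // Internal: available to coreUtilities at compile and run time, but not
        // leaked onto consumers' compile classpaths.
        implementation group: 'io.opentelemetry', name: 'opentelemetry-exporter-otlp'
    }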
Signed-off-by: Greg Schohn --- TrafficCapture/captureKafkaOffloader/build.gradle | 4 ---- TrafficCapture/captureOffloader/build.gradle | 3 +-- TrafficCapture/coreUtilities/build.gradle | 9 ++++++--- TrafficCapture/nettyWireLogging/build.gradle | 5 ----- TrafficCapture/trafficCaptureProxyServer/build.gradle | 7 ------- TrafficCapture/trafficReplayer/build.gradle | 8 -------- 6 files changed, 7 insertions(+), 29 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/build.gradle b/TrafficCapture/captureKafkaOffloader/build.gradle index b5a7c1ec8..82af8c0c0 100644 --- a/TrafficCapture/captureKafkaOffloader/build.gradle +++ b/TrafficCapture/captureKafkaOffloader/build.gradle @@ -9,13 +9,10 @@ repositories { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") - implementation project(':captureOffloader') implementation project(':coreUtilities') implementation group: 'com.google.protobuf', name:'protobuf-java', version:'3.22.2' api group:'io.netty', name:'netty-buffer', version: '4.1.100.Final' - implementation group: 'io.opentelemetry', name:'opentelemetry-api' implementation group: 'org.projectlombok', name:'lombok', version:'1.18.26' implementation group: 'org.apache.kafka', name:'kafka-clients', version:'3.6.0' implementation group: 'org.slf4j', name:'slf4j-api', version:'2.0.7' @@ -23,7 +20,6 @@ dependencies { testImplementation project(':captureProtobufs') testImplementation testFixtures(project(path: ':coreUtilities')) - testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' diff --git a/TrafficCapture/captureOffloader/build.gradle b/TrafficCapture/captureOffloader/build.gradle index f36862828..cdc0350c6 100644 --- a/TrafficCapture/captureOffloader/build.gradle +++ b/TrafficCapture/captureOffloader/build.gradle @@ -20,16 +20,15 @@ sourceSets { } } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") api group: 'io.netty', name: 'netty-buffer', version: '4.1.100.Final' implementation project(':captureProtobufs') implementation project(':coreUtilities') - implementation group: 'io.opentelemetry', name:'opentelemetry-api' implementation group: 'com.google.protobuf', name: 'protobuf-java', version: '3.22.2' implementation group: 'org.projectlombok', name: 'lombok', version: '1.18.26' implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' + testImplementation project(':coreUtilities') testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index 5cbd91ad8..d1fe06510 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -41,7 +41,7 @@ repositories { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") + api platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation project(':captureProtobufs') @@ -59,15 +59,18 @@ dependencies { implementation group: 'org.apache.logging.log4j', name: 
'log4j-slf4j2-impl', version: '2.20.0' // OpenTelemetry core - implementation group: 'io.opentelemetry', name:'opentelemetry-api' + api group: 'io.opentelemetry', name:'opentelemetry-api' + api group: 'io.opentelemetry', name:'opentelemetry-sdk' implementation group: 'io.opentelemetry', name:'opentelemetry-exporter-otlp' - implementation group: 'io.opentelemetry', name:'opentelemetry-sdk' implementation group: 'io.opentelemetry.instrumentation', name:'opentelemetry-log4j-appender-2.17', version: '1.30.0-alpha' implementation group: 'io.opentelemetry', name:'opentelemetry-semconv', version: '1.30.0-alpha' // OpenTelemetry log4j appender implementation("io.opentelemetry.instrumentation:opentelemetry-log4j-appender-2.17:1.30.0-alpha") + testFixturesApi group: 'io.opentelemetry', name: 'opentelemetry-api' + testFixturesApi group: 'io.opentelemetry', name: 'opentelemetry-sdk' + testFixturesApi group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk-testing' diff --git a/TrafficCapture/nettyWireLogging/build.gradle b/TrafficCapture/nettyWireLogging/build.gradle index 24ff8217c..cf08434fd 100644 --- a/TrafficCapture/nettyWireLogging/build.gradle +++ b/TrafficCapture/nettyWireLogging/build.gradle @@ -8,7 +8,6 @@ plugins { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation platform("io.netty:netty-bom:4.1.100.Final") implementation project(':captureOffloader') @@ -17,13 +16,11 @@ dependencies { api group: 'io.netty', name: 'netty-codec-http' api group: 'io.netty', name: 'netty-handler' - implementation group: 'io.opentelemetry', name:'opentelemetry-api' implementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testImplementation project(':captureProtobufs') testImplementation group: 'com.google.guava', name: 'guava', version: '32.0.1-jre' testImplementation group: 'com.google.protobuf', name: 'protobuf-java', version:'3.22.2' - testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation group: 'org.apache.httpcomponents.client5', name: 'httpclient5', version: '5.2.1' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' @@ -32,6 +29,4 @@ dependencies { testImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testImplementation testFixtures(project(path: ':testUtilities')) - - } diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index 2526c3698..74fd3dfe8 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -13,8 +13,6 @@ configurations { } dependencies { - implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") - implementation 'org.opensearch.plugin:opensearch-security:2.11.1.0' implementation 'org.opensearch:opensearch-common:2.11.0' implementation 'org.opensearch:opensearch-core:2.11.0' @@ -39,15 +37,10 @@ dependencies { implementation group: 'com.beust', name: 'jcommander', version: '1.82' implementation 'com.google.protobuf:protobuf-java:3.22.2' - implementation group: 'io.opentelemetry', name:'opentelemetry-api' - implementation group: 'io.opentelemetry', name: 
'opentelemetry-sdk' - testImplementation project(':captureProtobufs') testImplementation testFixtures(project(path: ':testUtilities')) testImplementation testFixtures(project(path: ':captureOffloader')) testImplementation testFixtures(project(path: ':coreUtilities')) - - testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' } tasks.withType(Tar){ diff --git a/TrafficCapture/trafficReplayer/build.gradle b/TrafficCapture/trafficReplayer/build.gradle index b8797a3ba..368eb1901 100644 --- a/TrafficCapture/trafficReplayer/build.gradle +++ b/TrafficCapture/trafficReplayer/build.gradle @@ -35,7 +35,6 @@ repositories { dependencies { //spotbugs 'com.github.spotbugs:spotbugs:4.7.3' def resilience4jVersion = "1.7.0"; - implementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") implementation project(':captureProtobufs') implementation project(':coreUtilities') @@ -51,8 +50,6 @@ dependencies { implementation group: 'io.github.resilience4j', name: 'resilience4j-ratelimiter', version:"${resilience4jVersion}" implementation group: 'io.github.resilience4j', name: 'resilience4j-retry', version:"${resilience4jVersion}" implementation group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' - implementation group: 'io.opentelemetry', name:'opentelemetry-api' - implementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' implementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.6.0' implementation group: 'org.apache.logging.log4j', name: 'log4j-api', version: '2.20.0' implementation group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.20.0' @@ -70,14 +67,10 @@ dependencies { testFixturesImplementation testFixtures(project(path: ':coreUtilities')) testFixturesImplementation testFixtures(project(path: ':testUtilities')) - testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") testFixturesImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' testFixturesImplementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.15.0' testFixturesImplementation group: 'io.netty', name: 'netty-all', version: '4.1.100.Final' testFixturesImplementation group: 'org.junit.jupiter', name:'junit-jupiter-api', version:'5.9.3' - testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' - testFixturesImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk' - testFixturesImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation project(':captureOffloader') testImplementation testFixtures(project(path: ':captureOffloader')) @@ -87,7 +80,6 @@ dependencies { testImplementation project(':replayerPlugins:jsonMessageTransformers:jsonJoltMessageTransformerProvider') testImplementation project(':replayerPlugins:jsonMessageTransformers:openSearch23PlusTargetTransformerProvider') - testImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testImplementation group: 'org.apache.httpcomponents.client5', name: 'httpclient5', version: '5.2.1' testImplementation group: 'org.junit.jupiter', name:'junit-jupiter-api', version:'5.x.x' testImplementation group: 'org.testcontainers', name: 'junit-jupiter', version: '1.19.0' From 5c454ca22c85986d6d471a3d742902f2e1451cda Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 22 Jan 2024 16:07:29 -0500 Subject: [PATCH 71/94] Partial checkin to delete dead code and clean up imports and style issues. 
Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 6 +- .../tracing/KafkaRecordContext.java | 4 +- .../KafkaCaptureFactoryTest.java | 4 - .../TestRootKafkaOffloaderContext.java | 4 - ...CodedOutputStreamAndByteBufferWrapper.java | 6 +- .../FileConnectionCaptureFactory.java | 1 - .../OrderedStreamLifecyleManager.java | 2 +- ...eamChannelConnectionCaptureSerializer.java | 1 - .../StreamLifecycleManager.java | 1 + .../tracing/ConnectionContext.java | 8 -- .../tracing/IRootOffloaderContext.java | 5 - ...hannelConnectionCaptureSerializerTest.java | 1 - .../InMemoryConnectionCaptureFactory.java | 1 - TrafficCapture/coreUtilities/build.gradle | 16 +-- .../coreutils/MetricsLogBuilder.java | 2 +- .../migrations/coreutils/MetricsLogger.java | 22 +---- .../AttributeNameMatchingPredicate.java | 42 -------- .../tracing/BaseNestedSpanContext.java | 16 +-- .../CommonScopedMetricInstruments.java | 1 + .../tracing/FilteringAttributeBuilder.java | 62 ------------ .../tracing/IHasRootInstrumentationScope.java | 2 - .../tracing/IInstrumentationAttributes.java | 4 +- .../migrations/tracing/IRootOtelContext.java | 1 - .../IScopedInstrumentationAttributes.java | 6 +- .../migrations/tracing/ISpanGenerator.java | 8 -- .../tracing/ISpanWithParentGenerator.java | 9 -- .../tracing/IWithStartTimeAndAttributes.java | 1 - .../tracing/IndirectNestedSpanContext.java | 16 --- .../migrations/tracing/RootOtelContext.java | 9 +- .../migrations/tracing/ContextTracker.java | 19 ++-- .../main/docker/otel-collector-config.yaml | 16 +-- .../src/main/docker/prometheus.yaml | 4 +- ...nditionallyReliableLoggingHttpHandler.java | 1 - .../netty/LoggingHttpHandler.java | 4 +- .../netty/PassThruHttpHeaders.java | 1 - .../netty/RequestCapturePredicate.java | 1 - .../tracing/IRootWireLoggingContext.java | 7 +- .../netty/tracing/IWireCaptureContexts.java | 10 +- .../netty/tracing/WireCaptureContexts.java | 25 +++-- .../proxyserver/CaptureProxy.java | 1 - .../netty/NettyScanningHttpProxy.java | 2 - .../netty/ProxyChannelInitializer.java | 3 - .../replay/AccumulationCallbacks.java | 6 +- ...edTrafficToHttpTransactionAccumulator.java | 8 +- .../replay/ClientConnectionPool.java | 2 +- .../replay/PacketConsumerFactory.java | 1 - ...acketToTransformingHttpHandlerFactory.java | 1 - .../replay/ParsedHttpMessagesAsDicts.java | 11 +-- .../migrations/replay/ReplayEngine.java | 2 - .../replay/RequestResponsePacketPair.java | 3 +- .../replay/RequestSenderOrchestrator.java | 3 +- .../replay/SourceTargetCaptureTuple.java | 2 - .../replay/TrafficCaptureSourceFactory.java | 3 - .../migrations/replay/TrafficReplayer.java | 21 ++-- .../replay/TupleParserChainConsumer.java | 4 +- .../NettyPacketToHttpConsumer.java | 6 +- ...dHttpRequestPreliminaryConvertHandler.java | 3 +- .../http/RequestPipelineOrchestrator.java | 1 - .../http/helpers/ReadMeteringingHandler.java | 4 +- .../http/helpers/WriteMeteringHandler.java | 4 +- .../datatypes/ConnectionReplaySession.java | 5 +- .../datatypes/ISourceTrafficChannelKey.java | 1 + .../replay/datatypes/ITrafficStreamKey.java | 1 + .../datatypes/PojoTrafficStreamKey.java | 1 + .../PojoTrafficStreamKeyAndContext.java | 5 +- .../kafka/KafkaTrafficCaptureSource.java | 1 - .../replay/tracing/ChannelContextManager.java | 6 +- .../tracing/IKafkaConsumerContexts.java | 26 +++-- .../replay/tracing/IReplayContexts.java | 92 +++++++++++++++--- .../replay/tracing/IRootReplayerContext.java | 2 +- .../tracing/ITrafficSourceContexts.java | 17 +++- .../replay/tracing/KafkaConsumerContexts.java | 11 +-- 
.../replay/tracing/ReplayContexts.java | 97 ++++++++++++------- .../replay/tracing/RootReplayerContext.java | 3 +- .../replay/tracing/TrafficSourceContexts.java | 5 +- .../traffic/source/BlockingTrafficSource.java | 4 - .../source/ISimpleTrafficCaptureSource.java | 5 - .../traffic/source/InputStreamOfTraffic.java | 2 +- .../CompressedFileTrafficCaptureSource.java | 2 - ...xpiringTrafficStreamMapSequentialTest.java | 1 - ...ExpiringTrafficStreamMapUnorderedTest.java | 1 - .../FullReplayerWithTracingChecksTest.java | 1 - .../replay/FullTrafficReplayerTest.java | 17 ---- .../KafkaRestartingTrafficReplayerTest.java | 2 - .../replay/ParsedHttpMessagesAsDictsTest.java | 5 - .../replay/ResultsToLogsConsumerTest.java | 2 - .../replay/SentinelSensingTrafficSource.java | 2 - .../SigV4SigningTransformationTest.java | 1 - ...afficToHttpTransactionAccumulatorTest.java | 2 - .../migrations/replay/TimeShifterTest.java | 2 - .../replay/TrafficReplayerRunner.java | 1 - .../replay/TrafficReplayerTest.java | 3 - .../replay/V0_1TrafficCaptureSource.java | 1 - .../replay/kafka/KafkaKeepAliveTests.java | 4 +- ...KafkaTrafficCaptureSourceLongTermTest.java | 1 - .../kafka/KafkaTrafficCaptureSourceTest.java | 5 +- .../replay/tracing/TracingTest.java | 21 ++-- .../tracing/InstrumentationTest.java | 5 +- .../migrations/tracing/TestContext.java | 1 + 99 files changed, 334 insertions(+), 470 deletions(-) delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java delete mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 650599114..833197d77 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -7,23 +7,21 @@ import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.opensearch.migrations.tracing.IInstrumentConstructor; +import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.CodedOutputStreamHolder; import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.OrderedStreamLifecyleManager; import 
org.opensearch.migrations.trafficcapture.StreamChannelConnectionCaptureSerializer; -import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.IRootKafkaOffloaderContext; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.KafkaRecordContext; import java.io.IOException; import java.nio.ByteBuffer; -import java.time.Duration; import java.time.Instant; import java.util.Arrays; import java.util.concurrent.CompletableFuture; diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index a4cc59df4..afd878881 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -2,15 +2,13 @@ import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; import lombok.Getter; import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; +import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; public class KafkaRecordContext extends BaseNestedSpanContext diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java index 295372b8d..2c5e2c0c2 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactoryTest.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader; import io.netty.buffer.Unpooled; -import io.opentelemetry.api.GlobalOpenTelemetry; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.ApiVersions; import org.apache.kafka.clients.producer.Callback; @@ -19,8 +18,6 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.migrations.tracing.RootOtelContext; -import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.TestRootKafkaOffloaderContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; @@ -30,7 +27,6 @@ import java.time.Instant; import java.util.ArrayList; import java.util.List; -import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import static org.mockito.ArgumentMatchers.any; diff --git 
a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java index 5733d751d..954a614f0 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java @@ -1,12 +1,8 @@ package org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing; -import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.api.trace.Span; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; import lombok.Getter; -import lombok.NonNull; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/CodedOutputStreamAndByteBufferWrapper.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/CodedOutputStreamAndByteBufferWrapper.java index 46927afe6..7cbd0bf52 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/CodedOutputStreamAndByteBufferWrapper.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/CodedOutputStreamAndByteBufferWrapper.java @@ -7,9 +7,11 @@ import java.nio.ByteBuffer; public class CodedOutputStreamAndByteBufferWrapper implements CodedOutputStreamHolder { - @NonNull @Getter + @NonNull + @Getter private final CodedOutputStream outputStream; - @NonNull @Getter + @NonNull + @Getter private final ByteBuffer byteBuffer; public CodedOutputStreamAndByteBufferWrapper(int bufferSize) { diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java index cf999108e..dc1cadeb0 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java @@ -4,7 +4,6 @@ import lombok.Lombok; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.FileNotFoundException; import java.io.FileOutputStream; diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/OrderedStreamLifecyleManager.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/OrderedStreamLifecyleManager.java index d585c1ff4..b3dc72e75 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/OrderedStreamLifecyleManager.java +++ 
b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/OrderedStreamLifecyleManager.java @@ -11,5 +11,5 @@ public CompletableFuture closeStream(CodedOutputStreamHolder outputStreamHold } protected abstract CompletableFuture kickoffCloseStream(CodedOutputStreamHolder outputStreamHolder, - int index); + int index); } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java index 5a04e239d..fdd2344ff 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java @@ -6,7 +6,6 @@ import io.netty.buffer.ByteBuf; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; - import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.ConnectionExceptionObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java index b41af74a5..b7c97f3c2 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java @@ -7,5 +7,6 @@ public interface StreamLifecycleManager extends AutoCloseable { CodedOutputStreamHolder createStream(); CompletableFuture closeStream(CodedOutputStreamHolder outputStreamHolder, int index); + void close() throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 8ab31e759..15db7ea94 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -1,25 +1,17 @@ package org.opensearch.migrations.trafficcapture.tracing; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.metrics.DoubleHistogram; -import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.metrics.MeterProvider; import lombok.Getter; import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; -import org.opensearch.migrations.tracing.AttributeNameMatchingPredicate; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.FilteringAttributeBuilder; import org.opensearch.migrations.tracing.IHasRootInstrumentationScope; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; public class ConnectionContext extends BaseNestedSpanContext implements IConnectionContext, 
IHasRootInstrumentationScope { - private static final AttributeNameMatchingPredicate KEYS_TO_EXCLUDE_FOR_ACTIVE_CONNECTION_COUNT = - AttributeNameMatchingPredicate.builder(true).add(CONNECTION_ID_ATTR.getKey()).build(); public static final String ACTIVE_CONNECTION = "activeConnection"; public static final String ACTIVITY_NAME = "captureConnection"; diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java index 8c041aaaf..2b0e6bab0 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/IRootOffloaderContext.java @@ -1,11 +1,6 @@ package org.opensearch.migrations.trafficcapture.tracing; -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.metrics.LongUpDownCounter; -import io.opentelemetry.api.metrics.Meter; -import lombok.Getter; import org.opensearch.migrations.tracing.IRootOtelContext; -import org.opensearch.migrations.tracing.RootOtelContext; public interface IRootOffloaderContext extends IRootOtelContext { //public static final String OFFLOADER_SCOPE_NAME = "Offloader"; diff --git a/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java b/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java index 97def992c..46ae835b2 100644 --- a/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java +++ b/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java @@ -24,7 +24,6 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Base64; -import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.CompletableFuture; diff --git a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java index 5a58029ef..ce2710906 100644 --- a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java @@ -5,7 +5,6 @@ import lombok.Getter; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.util.Arrays; diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index d1fe06510..92acdc556 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -55,15 +55,15 @@ dependencies { // Log4j implementation(platform("org.apache.logging.log4j:log4j-bom:2.21.1")) implementation group: 'org.apache.logging.log4j', name: 'log4j-api' - 
implementation group: 'org.apache.logging.log4j', name :'log4j-core' + implementation group: 'org.apache.logging.log4j', name: 'log4j-core' implementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl', version: '2.20.0' // OpenTelemetry core - api group: 'io.opentelemetry', name:'opentelemetry-api' - api group: 'io.opentelemetry', name:'opentelemetry-sdk' - implementation group: 'io.opentelemetry', name:'opentelemetry-exporter-otlp' - implementation group: 'io.opentelemetry.instrumentation', name:'opentelemetry-log4j-appender-2.17', version: '1.30.0-alpha' - implementation group: 'io.opentelemetry', name:'opentelemetry-semconv', version: '1.30.0-alpha' + api group: 'io.opentelemetry', name: 'opentelemetry-api' + api group: 'io.opentelemetry', name: 'opentelemetry-sdk' + implementation group: 'io.opentelemetry', name: 'opentelemetry-exporter-otlp' + implementation group: 'io.opentelemetry.instrumentation', name: 'opentelemetry-log4j-appender-2.17', version: '1.30.0-alpha' + implementation group: 'io.opentelemetry', name: 'opentelemetry-semconv', version: '1.30.0-alpha' // OpenTelemetry log4j appender implementation("io.opentelemetry.instrumentation:opentelemetry-log4j-appender-2.17:1.30.0-alpha") @@ -72,8 +72,8 @@ dependencies { testFixturesApi group: 'io.opentelemetry', name: 'opentelemetry-sdk' testFixturesApi group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testFixturesImplementation platform("io.opentelemetry:opentelemetry-bom:1.34.1") - testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-api' - testFixturesImplementation group: 'io.opentelemetry', name:'opentelemetry-sdk-testing' + testFixturesImplementation group: 'io.opentelemetry', name: 'opentelemetry-api' + testFixturesImplementation group: 'io.opentelemetry', name: 'opentelemetry-sdk-testing' testFixturesImplementation group: 'org.slf4j', name: 'slf4j-api', version: '2.0.7' } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java index 90744baa7..2a13d0f37 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogBuilder.java @@ -1,8 +1,8 @@ package org.opensearch.migrations.coreutils; import lombok.extern.slf4j.Slf4j; -import org.slf4j.event.Level; import org.slf4j.Logger; +import org.slf4j.event.Level; import org.slf4j.spi.LoggingEventBuilder; import java.util.Optional; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java index ef61af2d6..0bbc8b714 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/coreutils/MetricsLogger.java @@ -1,30 +1,10 @@ package org.opensearch.migrations.coreutils; -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.trace.Tracer; -import io.opentelemetry.context.Context; -import io.opentelemetry.exporter.otlp.logs.OtlpGrpcLogRecordExporter; -import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter; -import 
io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; -import io.opentelemetry.sdk.OpenTelemetrySdk; -import io.opentelemetry.sdk.logs.SdkLoggerProvider; -import io.opentelemetry.sdk.logs.export.BatchLogRecordProcessor; -import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; -import io.opentelemetry.sdk.metrics.SdkMeterProvider; -import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; -import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; -import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; -import org.slf4j.Logger; import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.time.Duration; -import java.util.concurrent.TimeUnit; - @Slf4j public class MetricsLogger { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java deleted file mode 100644 index e1438e279..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/AttributeNameMatchingPredicate.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.AttributeKey; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; - -import java.util.HashSet; -import java.util.Set; -import java.util.function.Predicate; - -public class AttributeNameMatchingPredicate implements Predicate { - private final boolean negate; - private final Set keysToMatch; - - public static class Builder { - private final Set namesSet = new HashSet<>(); - private final boolean negate; - public Builder(boolean negate) { - this.negate = negate; - } - public Builder add(String name) { - namesSet.add(name); - return this; - } - public AttributeNameMatchingPredicate build() { - return new AttributeNameMatchingPredicate(negate, namesSet); - } - } - - public static Builder builder(boolean negate) { - return new Builder(negate); - } - - private AttributeNameMatchingPredicate(boolean negate, Set keysToMatch) { - this.negate = negate; - this.keysToMatch = keysToMatch; - } - - @Override - public boolean test(AttributeKey attribute) { - return keysToMatch.contains(attribute.getKey()) == negate; - } -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index 620640eae..e2300ab9d 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -6,7 +6,6 @@ import io.opentelemetry.api.trace.Span; import lombok.Getter; import lombok.NonNull; -import lombok.Setter; import java.time.Instant; import java.util.Optional; @@ -16,14 +15,17 @@ public abstract class BaseNestedSpanContext implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, IHasRootInstrumentationScope, AutoCloseable { final T enclosingScope; - @Getter final Instant startTime; - @Getter private Span currentSpan; - @Getter private final S rootInstrumentationScope; + @Getter + final Instant startTime; + @Getter + private Span currentSpan; + @Getter + private final S rootInstrumentationScope; @Getter Throwable observedExceptionToIncludeInMetrics; 
protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder attributesBuilder, - AttributeKey key, Optional value) { + AttributeKey key, Optional value) { return value.map(v -> attributesBuilder.put(key, v)).orElse(attributesBuilder); } @@ -45,7 +47,9 @@ public IInstrumentationAttributes getEnclosingScope() { return enclosingScope; } - public T getImmediateEnclosingScope() { return enclosingScope; } + public T getImmediateEnclosingScope() { + return enclosingScope; + } protected void initializeSpan() { initializeSpan(Attributes.builder()); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java index 2aa0333c3..ec614e56a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java @@ -15,6 +15,7 @@ public class CommonScopedMetricInstruments { final LongCounter contextCounter; final LongCounter exceptionCounter; final DoubleHistogram contextDuration; + public CommonScopedMetricInstruments(Meter meter, String activityName) { this(meter, activityName, null); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java deleted file mode 100644 index 8206a9e74..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/FilteringAttributeBuilder.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.AttributeKey; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; -import lombok.Getter; - -import java.util.Collections; -import java.util.Set; -import java.util.function.Predicate; - -/** - * The use-case of filtering attributes in instruments might be better to implement via views. 
- */ -@Getter -public class FilteringAttributeBuilder implements AttributesBuilder { - - private AttributesBuilder underlyingBuilder; - private final Predicate excludePredicate; - - public FilteringAttributeBuilder(Predicate excludePredicate) { - this(Attributes.builder(), excludePredicate); - } - - public FilteringAttributeBuilder(AttributesBuilder underlyingBuilder, Predicate excludePredicate) { - this.underlyingBuilder = underlyingBuilder; - this.excludePredicate = excludePredicate; - } - - public static FilteringAttributeBuilder getBuilderThatIncludesNone() { - return new FilteringAttributeBuilder(x->true); - } - - @Override - public Attributes build() { - return underlyingBuilder.build(); - } - - @Override - public AttributesBuilder put(AttributeKey key, int value) { - if (excludePredicate.test(key)) { - return this; - } - underlyingBuilder = underlyingBuilder.put(key, value); - return this; - } - - @Override - public AttributesBuilder put(AttributeKey key, T value) { - if (excludePredicate.test(key)) { - return this; - } - underlyingBuilder = underlyingBuilder.put(key, value); - return this; - } - - @Override - public AttributesBuilder putAll(Attributes attributes) { - attributes.forEach((k,v)->{ this.underlyingBuilder = underlyingBuilder.put((AttributeKey)k,v); }); - return this; - } -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java index b9381b1cf..80a56cbb0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IHasRootInstrumentationScope.java @@ -1,7 +1,5 @@ package org.opensearch.migrations.tracing; -import lombok.Getter; - /** * This exists as helper glue to make pattern matching in the generics * work to allow for more simplified constructors. 
diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 4fa589508..999f02835 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -79,11 +79,11 @@ default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder } default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { - meterHistogram(histogram, value.toNanos()/1_000_000.0); + meterHistogram(histogram, value.toNanos() / 1_000_000.0); } default void meterHistogramMillis(DoubleHistogram histogram, Duration value, AttributesBuilder attributesBuilder) { - meterHistogram(histogram, value.toNanos()/1_000_000.0, attributesBuilder); + meterHistogram(histogram, value.toNanos() / 1_000_000.0, attributesBuilder); } default void meterHistogram(DoubleHistogram histogram, double value) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java index c98382053..4fb34ece9 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IRootOtelContext.java @@ -1,6 +1,5 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; public interface IRootOtelContext extends IInstrumentationAttributes, IInstrumentConstructor { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index 6e49e0aaa..dec7c095f 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -1,7 +1,7 @@ package org.opensearch.migrations.tracing; -import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.trace.Span; import lombok.NonNull; @@ -12,13 +12,17 @@ public interface IScopedInstrumentationAttributes @Override @NonNull Span getCurrentSpan(); + CommonScopedMetricInstruments getMetrics(); + default LongCounter getEndOfScopeCountMetric() { return getMetrics().contextCounter; } + default DoubleHistogram getEndOfScopeDurationMetric() { return getMetrics().contextDuration; } + default void endSpan() { getCurrentSpan().end(); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java deleted file mode 100644 index 84eb59192..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanGenerator.java +++ /dev/null @@ -1,8 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.Attributes; -import 
io.opentelemetry.api.trace.Span; - -import java.util.function.Function; - -public interface ISpanGenerator extends Function { } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java deleted file mode 100644 index e9180fde7..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/ISpanWithParentGenerator.java +++ /dev/null @@ -1,9 +0,0 @@ -package org.opensearch.migrations.tracing; - -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.Span; - -import java.util.function.BiFunction; - -public interface ISpanWithParentGenerator extends BiFunction { -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 75f90f0d6..814b0199d 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -2,7 +2,6 @@ import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.DoubleHistogram; -import io.opentelemetry.api.metrics.LongHistogram; import java.time.Duration; import java.time.Instant; diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java deleted file mode 100644 index 0d0bcda21..000000000 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IndirectNestedSpanContext.java +++ /dev/null @@ -1,16 +0,0 @@ -package org.opensearch.migrations.tracing; - -import lombok.NonNull; - -public abstract class IndirectNestedSpanContext, - L> - extends BaseNestedSpanContext - implements IWithTypedEnclosingScope -{ - protected IndirectNestedSpanContext(@NonNull D enclosingScope) { - super(enclosingScope.getRootInstrumentationScope(), enclosingScope); - } - - public abstract L getLogicalEnclosingScope(); -} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 81ffa036c..a5695024c 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -116,15 +116,16 @@ public AttributesBuilder fillAttributes(AttributesBuilder builder) { private static SpanBuilder addLinkedToBuilder(Stream linkedSpanContexts, SpanBuilder spanBuilder) { return Optional.ofNullable(linkedSpanContexts) - .map(ss->ss.collect(Utils.foldLeft(spanBuilder, (b,s)->b.addLink(s.getSpanContext())))) + .map(ss -> ss.collect(Utils.foldLeft(spanBuilder, (b, s) -> b.addLink(s.getSpanContext())))) .orElse(spanBuilder); } private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan, Stream linkedSpanContexts) { - return addLinkedToBuilder(linkedSpanContexts, Optional.ofNullable(parentSpan) - .map(p -> builder.setParent(Context.current().with(p))) - .orElseGet(builder::setNoParent)) + 
return addLinkedToBuilder(linkedSpanContexts, + Optional.ofNullable(parentSpan) + .map(p -> builder.setParent(Context.current().with(p))) + .orElseGet(builder::setNoParent)) .startSpan().setAllAttributes(attrs); } diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java index 7fa87c90e..d33aeb597 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java @@ -3,26 +3,25 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import java.lang.ref.WeakReference; -import java.util.HashMap; import java.util.Map; import java.util.WeakHashMap; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; -import java.util.stream.Stream; @Slf4j public class ContextTracker { - private static class ExceptionForStackTracingOnly extends Exception {} + private static class ExceptionForStackTracingOnly extends Exception { + } + @Getter public static class CallDetails { private final ExceptionForStackTracingOnly createStackException; private ExceptionForStackTracingOnly closeStackException; + public CallDetails() { createStackException = new ExceptionForStackTracingOnly(); } } + private final Map scopedContextToCallDetails = new WeakHashMap<>(); private final Object lockObject = new Object(); @@ -41,19 +40,19 @@ public void onClosed(IScopedInstrumentationAttributes ctx) { assert oldCallDetails != null; final var oldE = oldCallDetails.closeStackException; if (oldE != null) { - log.atError().setCause(newExceptionStack).setMessage(()->"Close is being called here").log(); - log.atError().setCause(oldE).setMessage(()->"... but close was already called here").log(); + log.atError().setCause(newExceptionStack).setMessage(() -> "Close is being called here").log(); + log.atError().setCause(oldE).setMessage(() -> "... 
but close was already called here").log(); assert oldE == null; } oldCallDetails.closeStackException = new ExceptionForStackTracingOnly(); } } - public Map getAllRemainingActiveScopes() { + public Map getAllRemainingActiveScopes() { synchronized (lockObject) { return scopedContextToCallDetails.entrySet().stream() // filter away items that were closed but not cleared yet (since it's a weak map) - .filter(kvp->kvp.getValue().closeStackException == null) + .filter(kvp -> kvp.getValue().closeStackException == null) // make a copy since we're in a synchronized block .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml index c08cb8788..3334c57d8 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml @@ -1,7 +1,7 @@ receivers: otlp: protocols: - grpc: + grpc: processors: batch: @@ -31,13 +31,13 @@ extensions: endpoint: :55679 service: - extensions: [pprof, zpages, health_check] + extensions: [ pprof, zpages, health_check ] pipelines: traces: - receivers: [otlp] - processors: [batch] - exporters: [otlp/jaeger] + receivers: [ otlp ] + processors: [ batch ] + exporters: [ otlp/jaeger ] metrics: - receivers: [otlp] - processors: [batch] - exporters: [logging,prometheus] + receivers: [ otlp ] + processors: [ batch ] + exporters: [ logging,prometheus ] diff --git a/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml b/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml index 028af3f5e..5b112abd0 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/prometheus.yaml @@ -3,5 +3,5 @@ scrape_configs: scrape_interval: 1s honor_timestamps: true static_configs: - - targets: ['otel-collector:8889'] - - targets: ['otel-collector:8888'] + - targets: [ 'otel-collector:8889' ] + - targets: [ 'otel-collector:8888' ] diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java index bd4828602..1513ae131 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/ConditionallyReliableLoggingHttpHandler.java @@ -9,7 +9,6 @@ import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; import org.opensearch.migrations.trafficcapture.netty.tracing.IWireCaptureContexts; -import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; import java.io.IOException; import java.util.function.Predicate; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java index ffb1f3ddb..8a5f7b5b0 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java +++ 
b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/LoggingHttpHandler.java @@ -21,13 +21,11 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; -import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.trafficcapture.IChannelConnectionCaptureSerializer; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; import org.opensearch.migrations.trafficcapture.netty.tracing.IWireCaptureContexts; -import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import java.io.IOException; import java.time.Instant; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/PassThruHttpHeaders.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/PassThruHttpHeaders.java index 33953b55c..a45f85d30 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/PassThruHttpHeaders.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/PassThruHttpHeaders.java @@ -6,7 +6,6 @@ import lombok.NonNull; import java.util.Arrays; -import java.util.List; import java.util.stream.Stream; public class PassThruHttpHeaders extends DefaultHttpHeaders { diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestCapturePredicate.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestCapturePredicate.java index 45d4241fa..cc38aebfd 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestCapturePredicate.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/RequestCapturePredicate.java @@ -1,6 +1,5 @@ package org.opensearch.migrations.trafficcapture.netty; -import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpRequest; import lombok.Getter; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java index 3be5b4d3e..1b409c200 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IRootWireLoggingContext.java @@ -1,9 +1,5 @@ package org.opensearch.migrations.trafficcapture.netty.tracing; -import lombok.Getter; -import org.opensearch.migrations.tracing.IRootOtelContext; -import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -import org.opensearch.migrations.trafficcapture.tracing.ConnectionContext; import org.opensearch.migrations.trafficcapture.tracing.IRootOffloaderContext; public interface IRootWireLoggingContext extends IRootOffloaderContext { @@ -11,8 +7,11 @@ public interface IRootWireLoggingContext extends 
IRootOffloaderContext { WireCaptureContexts.ConnectionContext.MetricInstruments getConnectionInstruments(); WireCaptureContexts.RequestContext.MetricInstruments getRequestInstruments(); + WireCaptureContexts.BlockingContext.MetricInstruments getBlockingInstruments(); + WireCaptureContexts.WaitingForResponseContext.MetricInstruments getWaitingForResponseInstruments(); + WireCaptureContexts.ResponseContext.MetricInstruments getResponseInstruments(); IWireCaptureContexts.ICapturingConnectionContext createConnectionContext(String channelKey, String nodeId); diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java index 9f718b045..f5294bfcc 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java @@ -32,14 +32,17 @@ public interface IHttpMessageContext IWithTypedEnclosingScope { IBlockingContext createBlockingContext(); + IWaitingForResponseContext createWaitingForResponseContext(); + IResponseContext createResponseContext(); + IRequestContext createNextRequestContext(); } - public interface IRequestContext extends IHttpMessageContext - { + public interface IRequestContext extends IHttpMessageContext { String ACTIVITY_NAME = "gatheringRequest"; + default String getActivityName() { return ACTIVITY_NAME; } @@ -55,6 +58,7 @@ default String getActivityName() { public interface IBlockingContext extends IHttpMessageContext { String ACTIVITY_NAME = "blocked"; + default String getActivityName() { return ACTIVITY_NAME; } @@ -62,6 +66,7 @@ default String getActivityName() { public interface IWaitingForResponseContext extends IHttpMessageContext { String ACTIVITY_NAME = "waitingForResponse"; + default String getActivityName() { return ACTIVITY_NAME; } @@ -69,6 +74,7 @@ default String getActivityName() { public interface IResponseContext extends IHttpMessageContext { String ACTIVITY_NAME = "gatheringResponse"; + default String getActivityName() { return ACTIVITY_NAME; } diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java index 30d447077..8785d5251 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -51,7 +51,7 @@ public MetricInstruments(Meter meter, String activityMeter) { @Override public IWireCaptureContexts.IHttpMessageContext createInitialRequestContext() { - return new RequestContext((RootWireLoggingContext)getRootInstrumentationScope(), + return new RequestContext((RootWireLoggingContext) getRootInstrumentationScope(), this, 0); } @@ -74,7 +74,7 @@ public abstract static class HttpMessageContext extends final long sourceRequestIndex; protected HttpMessageContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, - long sourceRequestIndex) { + long sourceRequestIndex) { super(rootWireLoggingContext, enclosingScope); 
this.sourceRequestIndex = sourceRequestIndex; initializeSpan(); @@ -108,7 +108,7 @@ public IWireCaptureContexts.IResponseContext createResponseContext() { public IWireCaptureContexts.IRequestContext createNextRequestContext() { close(); return new RequestContext(getRootInstrumentationScope(), getImmediateEnclosingScope(), - sourceRequestIndex+1); + sourceRequestIndex + 1); } } @@ -145,7 +145,7 @@ public MetricInstruments(Meter meter, String activityName) { .counterBuilder(MetricNames.BYTES_READ).setUnit(BYTES_UNIT).build(); } } - + public static @NonNull MetricInstruments makeMetrics(Meter meter) { return new MetricInstruments(meter, ACTIVITY_NAME); } @@ -182,8 +182,8 @@ public static class BlockingContext public static final String ACTIVITY_NAME = "blocked"; public BlockingContext(RootWireLoggingContext rootWireLoggingContext, - IConnectionContext enclosingScope, - long sourceRequestIndex) { + IConnectionContext enclosingScope, + long sourceRequestIndex) { super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); } @@ -191,6 +191,7 @@ public BlockingContext(RootWireLoggingContext rootWireLoggingContext, public String getActivityName() { return ACTIVITY_NAME; } + public static class MetricInstruments extends CommonScopedMetricInstruments { private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); @@ -211,9 +212,10 @@ public static class WaitingForResponseContext extends HttpMessageContext implements IWireCaptureContexts.IWaitingForResponseContext { public static final String ACTIVITY_NAME = "waitingForResponse"; + public WaitingForResponseContext(RootWireLoggingContext rootWireLoggingContext, - IConnectionContext enclosingScope, - long sourceRequestIndex) { + IConnectionContext enclosingScope, + long sourceRequestIndex) { super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); } @@ -221,6 +223,7 @@ public WaitingForResponseContext(RootWireLoggingContext rootWireLoggingContext, public String getActivityName() { return ACTIVITY_NAME; } + public static class MetricInstruments extends CommonScopedMetricInstruments { private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); @@ -241,9 +244,10 @@ public static class ResponseContext extends HttpMessageContext implements IWireCaptureContexts.IResponseContext { public static final String ACTIVITY_NAME = "gatheringResponse"; + public ResponseContext(RootWireLoggingContext rootWireLoggingContext, - IConnectionContext enclosingScope, - long sourceRequestIndex) { + IConnectionContext enclosingScope, + long sourceRequestIndex) { super(rootWireLoggingContext, enclosingScope, sourceRequestIndex); } @@ -255,6 +259,7 @@ public String getActivityName() { public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter bytesWritten; + private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); bytesWritten = meter diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index edd172c9a..dd7da0c23 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -25,7 +25,6 @@ import 
org.opensearch.migrations.trafficcapture.StreamLifecycleManager; import org.opensearch.migrations.trafficcapture.kafkaoffloader.KafkaCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.HeaderValueFilteringCapturePredicate; -import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; import org.opensearch.migrations.trafficcapture.proxyserver.netty.BacksideConnectionPool; import org.opensearch.migrations.trafficcapture.proxyserver.netty.NettyScanningHttpProxy; import org.opensearch.security.ssl.DefaultSecurityKeyStore; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java index 0602c8057..869f84e3d 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java @@ -8,8 +8,6 @@ import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.util.concurrent.DefaultThreadFactory; import lombok.NonNull; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index a44412ff3..c4df0e019 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -6,13 +6,10 @@ import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.ssl.SslHandler; import lombok.NonNull; -import org.opensearch.migrations.tracing.IInstrumentConstructor; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.ConditionallyReliableLoggingHttpHandler; import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; -import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; import javax.net.ssl.SSLEngine; import java.io.IOException; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java index c964ff1ca..6db6e9bdc 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/AccumulationCallbacks.java @@ -1,9 +1,7 @@ package org.opensearch.migrations.replay; import lombok.NonNull; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import java.time.Instant; @@ -12,15 +10,19 @@ public interface AccumulationCallbacks { void onRequestReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull HttpMessageAndTimestamp request); + void onFullDataReceived(@NonNull IReplayContexts.IReplayerHttpTransactionContext ctx, @NonNull RequestResponsePacketPair rrpp); + void onTrafficStreamsExpired(RequestResponsePacketPair.ReconstructionStatus status, @NonNull IReplayContexts.IChannelKeyContext ctx, @NonNull List trafficStreamKeysBeingHeld); + void onConnectionClose(int channelInteractionNumber, @NonNull IReplayContexts.IChannelKeyContext ctx, RequestResponsePacketPair.ReconstructionStatus status, @NonNull Instant when, @NonNull List trafficStreamKeysBeingHeld); + void onTrafficStreamIgnored(@NonNull IReplayContexts.ITrafficStreamsLifecycleContext ctx); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java index a62195170..c5dc6505b 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/CapturedTrafficToHttpTransactionAccumulator.java @@ -162,17 +162,17 @@ private static String summarizeTrafficStream(TrafficStream ts) { public void accept(ITrafficStreamWithKey trafficStreamAndKey) { var yetToBeSequencedTrafficStream = trafficStreamAndKey.getStream(); - log.atTrace().setMessage(()->"Got trafficStream: "+summarizeTrafficStream(yetToBeSequencedTrafficStream)).log(); + log.atTrace().setMessage(() -> "Got trafficStream: " + summarizeTrafficStream(yetToBeSequencedTrafficStream)).log(); var partitionId = yetToBeSequencedTrafficStream.getNodeId(); var connectionId = yetToBeSequencedTrafficStream.getConnectionId(); var tsk = trafficStreamAndKey.getKey(); - var accum = liveStreams.getOrCreateWithoutExpiration(tsk, k->createInitialAccumulation(trafficStreamAndKey)); + var accum = liveStreams.getOrCreateWithoutExpiration(tsk, k -> createInitialAccumulation(trafficStreamAndKey)); var trafficStream = trafficStreamAndKey.getStream(); - for (int i=0; i"Connection terminated: removing " + partitionId + ":" + connectionId + + log.atInfo().setMessage(() -> "Connection terminated: removing " + partitionId + ":" + connectionId + " from liveStreams map").log(); liveStreams.remove(partitionId, connectionId); break; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index b81a3bfe5..4fdbae4fe 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -14,9 
+14,9 @@ import io.opentelemetry.context.ContextKey; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java index 74cbdadec..424780143 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketConsumerFactory.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.replay; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; public interface PacketConsumerFactory { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java index ef3cc42d2..83c399791 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/PacketToTransformingHttpHandlerFactory.java @@ -6,7 +6,6 @@ import org.opensearch.migrations.replay.datahandlers.http.HttpJsonTransformingConsumer; import org.opensearch.migrations.replay.datatypes.TransformedOutputAndResult; import org.opensearch.migrations.replay.datatypes.TransformedPackets; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java index ee29dee8f..f7572a3e1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java @@ -3,12 +3,9 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.HttpHeaders; -import io.opentelemetry.api.common.AttributesBuilder; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.coreutils.MetricsLogBuilder; import org.opensearch.migrations.replay.datatypes.TransformedPackets; -import org.opensearch.migrations.replay.datatypes.UniqueSourceRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import java.time.Duration; @@ -100,8 +97,8 @@ public 
ParsedHttpMessagesAsDicts(IReplayContexts.ITupleHandlingContext context, public static void fillStatusCodeMetrics(@NonNull IReplayContexts.ITupleHandlingContext context, Optional> sourceResponseOp, Optional> targetResponseOp) { - sourceResponseOp.ifPresent(r -> context.setMethod((String)r.get("Method"))); - sourceResponseOp.ifPresent(r -> context.setEndpoint((String)r.get("Request-URI"))); + sourceResponseOp.ifPresent(r -> context.setMethod((String) r.get("Method"))); + sourceResponseOp.ifPresent(r -> context.setEndpoint((String) r.get("Request-URI"))); sourceResponseOp.ifPresent(r -> context.setSourceStatus((Integer) r.get(STATUS_CODE_KEY))); targetResponseOp.ifPresent(r -> context.setTargetStatus((Integer) r.get(STATUS_CODE_KEY))); } @@ -137,8 +134,8 @@ private static Map makeSafeMap(@NonNull IReplayContexts.ITupleHa } catch (Exception e) { // TODO - this isn't a good design choice. // We should follow through with the spirit of this class and leave this as empty optional values - log.atWarn().setMessage(()->"Putting what may be a bogus value in the output because transforming it " + - "into json threw an exception for "+context).setCause(e).log(); + log.atWarn().setMessage(() -> "Putting what may be a bogus value in the output because transforming it " + + "into json threw an exception for " + context).setCause(e).log(); return Map.of("Exception", (Object) e.toString()); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index 59e29a1cb..d6f802b3a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -6,9 +6,7 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java index d457d4e28..6c18b0dfd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestResponsePacketPair.java @@ -27,7 +27,8 @@ public enum ReconstructionStatus { HttpMessageAndTimestamp requestData; HttpMessageAndTimestamp responseData; - @NonNull final ISourceTrafficChannelKey firstTrafficStreamKeyForRequest; + @NonNull + final ISourceTrafficChannelKey firstTrafficStreamKeyForRequest; List trafficStreamKeysBeingHeld; ReconstructionStatus completionStatus; // switch between RequestAccumulation/ResponseAccumulation objects when we're parsing, diff --git 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index d31a90011..d8e068837 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -5,12 +5,11 @@ import io.netty.channel.EventLoop; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; +import org.opensearch.migrations.replay.datatypes.ChannelTask; import org.opensearch.migrations.replay.datatypes.ChannelTaskType; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.datatypes.IndexedChannelInteraction; -import org.opensearch.migrations.replay.datatypes.ChannelTask; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java index f4425648f..5bf8b1d5c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/SourceTargetCaptureTuple.java @@ -5,9 +5,7 @@ import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.datatypes.UniqueSourceRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.ReplayContexts; import java.time.Duration; import java.util.List; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java index ede09d74d..6ad84db0e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficCaptureSourceFactory.java @@ -3,13 +3,10 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.kafka.KafkaBehavioralPolicy; import org.opensearch.migrations.replay.kafka.KafkaTrafficCaptureSource; -import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import 
org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.FileInputStream; import java.io.IOException; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 978a2afb9..1074c9c10 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -13,45 +13,42 @@ import lombok.NonNull; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.opensearch.migrations.coreutils.MetricsLogger; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.ReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; -import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; -import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.IRootReplayerContext; +import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ITrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.replay.traffic.source.TrafficStreamLimiter; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; +import org.opensearch.migrations.tracing.RootOtelContext; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; +import org.opensearch.migrations.transform.IHttpMessage; import org.opensearch.migrations.transform.IJsonTransformer; import org.opensearch.migrations.transform.RemovingAuthTransformerFactory; import org.opensearch.migrations.transform.StaticAuthTransformerFactory; import org.slf4j.event.Level; import org.slf4j.spi.LoggingEventBuilder; -import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; import software.amazon.awssdk.arns.Arn; +import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; import software.amazon.awssdk.regions.Region; import javax.net.ssl.SSLException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.nio.charset.StandardCharsets; import java.io.EOFException; import java.io.IOException; import java.lang.ref.WeakReference; import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; diff 
--git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java index da7ebfbb8..dc91ce27f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TupleParserChainConsumer.java @@ -1,9 +1,7 @@ package org.opensearch.migrations.replay; import lombok.NonNull; -import org.opensearch.migrations.coreutils.MetricsLogger; -import java.util.Optional; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -11,7 +9,7 @@ public class TupleParserChainConsumer implements Consumer innerConsumer; public TupleParserChainConsumer(@NonNull BiConsumer innerConsumer) { + ParsedHttpMessagesAsDicts> innerConsumer) { this.innerConsumer = innerConsumer; } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index b9351b1b3..6c953d4e1 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -22,15 +22,15 @@ import org.opensearch.migrations.coreutils.MetricsAttributeKey; import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; +import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; import org.opensearch.migrations.replay.datahandlers.http.helpers.WriteMeteringHandler; -import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; -import org.opensearch.migrations.replay.AggregatedRawResponse; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; +import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import java.net.URI; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java index 142739f36..56ffea6c7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/NettyDecodedHttpRequestPreliminaryConvertHandler.java @@ -12,12 +12,11 @@ import org.opensearch.migrations.replay.datahandlers.PayloadAccessFaultingMap; import org.opensearch.migrations.replay.datahandlers.PayloadNotLoadedException; import 
org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IJsonTransformer; -import java.util.List; import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index c2d0149ac..9840d4d80 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -13,7 +13,6 @@ import org.opensearch.migrations.replay.datahandlers.http.helpers.LastHttpContentListener; import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; import org.opensearch.migrations.replay.tracing.IReplayContexts; -import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; import org.opensearch.migrations.transform.IJsonTransformer; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java index 507387a61..87d38cd1e 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java @@ -15,9 +15,9 @@ public class ReadMeteringingHandler extends ChannelInboundHandlerAdapter { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (msg instanceof ByteBuf) { - sizeConsumer.accept(((ByteBuf)msg).readableBytes()); + sizeConsumer.accept(((ByteBuf) msg).readableBytes()); } else if (msg instanceof HttpContent) { - sizeConsumer.accept(((HttpContent)msg).content().readableBytes()); + sizeConsumer.accept(((HttpContent) msg).content().readableBytes()); } super.channelRead(ctx, msg); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java index e3045a7d3..7fdfc9ded 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/WriteMeteringHandler.java @@ -18,9 +18,9 @@ public WriteMeteringHandler(IntConsumer sizeConsumer) { @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { if (msg instanceof ByteBuf) { - sizeConsumer.accept(((ByteBuf)msg).readableBytes()); + sizeConsumer.accept(((ByteBuf) msg).readableBytes()); } else if (msg instanceof 
HttpContent) { - sizeConsumer.accept(((HttpContent)msg).content().readableBytes()); + sizeConsumer.accept(((HttpContent) msg).content().readableBytes()); } super.write(ctx, msg, promise); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java index e018f3c61..91f6c8093 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java @@ -27,8 +27,9 @@ public class ConnectionReplaySession { * EventLoop so that we can route all calls for this object into that loop/thread. */ public final EventLoop eventLoop; - @Getter @Setter - private DiagnosticTrackableCompletableFuture channelFutureFuture; + @Getter + @Setter + private DiagnosticTrackableCompletableFuture channelFutureFuture; public final OnlineRadixSorter scheduleSequencer; public final TimeToResponseFulfillmentFutureMap schedule; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java index 252c00fd0..3ca8482c4 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ISourceTrafficChannelKey.java @@ -5,6 +5,7 @@ public interface ISourceTrafficChannelKey { String getNodeId(); + String getConnectionId(); @Getter diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java index 618b37246..0b3ea72db 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ITrafficStreamKey.java @@ -5,5 +5,6 @@ public interface ITrafficStreamKey extends ISourceTrafficChannelKey { int getTrafficStreamIndex(); + @NonNull IReplayContexts.ITrafficStreamsLifecycleContext getTrafficStreamsContext(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java index 02392ac21..a9d193fc7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKey.java @@ -12,6 +12,7 @@ protected PojoTrafficStreamKey(String nodeId, String connectionId, int index) { super(nodeId, connectionId); this.trafficStreamIndex = index; } + protected PojoTrafficStreamKey(PojoImpl tsk, int index) { this(tsk.nodeId, tsk.connectionId, index); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java index 279dcab82..511ab2a0f 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/PojoTrafficStreamKeyAndContext.java @@ -1,15 +1,14 @@ package org.opensearch.migrations.replay.datatypes; -import java.util.function.Function; - import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NonNull; import lombok.Setter; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; +import java.util.function.Function; + @EqualsAndHashCode(callSuper = true) public class PojoTrafficStreamKeyAndContext extends PojoTrafficStreamKey { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java index a74336db6..3689e33b2 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSource.java @@ -19,7 +19,6 @@ import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.FileInputStream; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index 78e96db00..90fb8f9d0 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -2,7 +2,6 @@ import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import java.util.HashMap; import java.util.function.Function; @@ -16,7 +15,8 @@ public ChannelContextManager(RootReplayerContext globalContext) { } private static class RefCountedContext { - @Getter final IReplayContexts.IChannelKeyContext context; + @Getter + final IReplayContexts.IChannelKeyContext context; private int refCount; private RefCountedContext(IReplayContexts.IChannelKeyContext context) { @@ -48,7 +48,7 @@ public IReplayContexts.IChannelKeyContext apply(ITrafficStreamKey tsk) { public IReplayContexts.IChannelKeyContext retainOrCreateContext(ITrafficStreamKey tsk) { return connectionToChannelContextMap.computeIfAbsent(tsk.getConnectionId(), - k-> new RefCountedContext(globalContext.createChannelContext(tsk))).retain(); + k -> new RefCountedContext(globalContext.createChannelContext(tsk))).retain(); } public IReplayContexts.IChannelKeyContext releaseContextFor(IReplayContexts.IChannelKeyContext ctx) { diff 
--git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java index 9169d16eb..e07c45b09 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IKafkaConsumerContexts.java @@ -1,6 +1,5 @@ package org.opensearch.migrations.replay.tracing; -import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; @@ -28,19 +27,28 @@ private MetricNames() {} interface IAsyncListeningContext extends IInstrumentationAttributes { } + interface IKafkaConsumerScope extends IScopedInstrumentationAttributes { } + interface ITouchScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.TOUCH; + @Override - default String getActivityName() { return ACTIVITY_NAME; } + default String getActivityName() { + return ACTIVITY_NAME; + } IPollScopeContext createNewPollContext(); } + interface IPollScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.KAFKA_POLL; + @Override - default String getActivityName() { return ACTIVITY_NAME; } + default String getActivityName() { + return ACTIVITY_NAME; + } } /** @@ -48,8 +56,11 @@ interface IPollScopeContext extends IKafkaConsumerScope { */ interface ICommitScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.COMMIT; + @Override - default String getActivityName() { return ACTIVITY_NAME; } + default String getActivityName() { + return ACTIVITY_NAME; + } IKafkaCommitScopeContext createNewKafkaCommitContext(); } @@ -57,9 +68,12 @@ interface ICommitScopeContext extends IKafkaConsumerScope { /** * Context for ONLY the service call to Kafka to perform the commit. 
*/ - interface IKafkaCommitScopeContext extends IKafkaConsumerScope{ + interface IKafkaCommitScopeContext extends IKafkaConsumerScope { String ACTIVITY_NAME = ActivityNames.KAFKA_COMMIT; + @Override - default String getActivityName() { return ACTIVITY_NAME; } + default String getActivityName() { + return ACTIVITY_NAME; + } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index bf27f5859..d639ce043 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -9,7 +9,6 @@ import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; import java.time.Instant; -import java.util.Optional; public abstract class IReplayContexts { @@ -68,7 +67,10 @@ public interface IChannelKeyContext org.opensearch.migrations.tracing.commoncontexts.IConnectionContext { String ACTIVITY_NAME = ActivityNames.CHANNEL; - @Override default String getActivityName() { return ACTIVITY_NAME;} + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } // do not add this as a property // because its components are already being added in the IConnectionContext implementation @@ -89,7 +91,10 @@ public interface IKafkaRecordContext { String ACTIVITY_NAME = ActivityNames.RECORD_LIFETIME; - @Override default String getActivityName() { return ACTIVITY_NAME;} + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } static final AttributeKey RECORD_ID_KEY = AttributeKey.stringKey("recordId"); @@ -108,12 +113,19 @@ public interface ITrafficStreamsLifecycleContext IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TRAFFIC_STREAM_LIFETIME; - @Override default String getActivityName() { return ACTIVITY_NAME;} + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } + ITrafficStreamKey getTrafficStreamKey(); + IChannelKeyContext getChannelKeyContext(); + default String getConnectionId() { return getChannelKey().getConnectionId(); } + default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -129,15 +141,22 @@ public interface IReplayerHttpTransactionContext AttributeKey REPLAYER_REQUEST_INDEX_KEY = AttributeKey.longKey("replayerRequestIndex"); String ACTIVITY_NAME = ActivityNames.HTTP_TRANSACTION; - @Override default String getActivityName() { return ACTIVITY_NAME;} + + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } UniqueReplayerRequestKey getReplayerRequestKey(); + IChannelKeyContext getChannelKeyContext(); + Instant getTimeOfOriginalRequest(); default String getConnectionId() { return getChannelKey().getConnectionId(); } + default ISourceTrafficChannelKey getChannelKey() { return getChannelKeyContext().getChannelKey(); } @@ -157,10 +176,15 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } IRequestAccumulationContext createRequestAccumulationContext(); + IResponseAccumulationContext createResponseAccumulationContext(); + IRequestTransformationContext createTransformationContext(); + IScheduledContext createScheduledContext(Instant timestamp); + ITargetRequestContext createTargetRequestContext(); + ITupleHandlingContext createTupleContext(); } @@ -170,7 +194,9 @@ public interface IRequestAccumulationContext String 
ACTIVITY_NAME = ActivityNames.ACCUMULATING_REQUEST; @Override - default String getActivityName() { return ACTIVITY_NAME;} + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface IResponseAccumulationContext @@ -179,7 +205,9 @@ public interface IResponseAccumulationContext String ACTIVITY_NAME = ActivityNames.ACCUMULATING_RESPONSE; @Override - default String getActivityName() { return ACTIVITY_NAME;} + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface IRequestTransformationContext @@ -187,9 +215,13 @@ public interface IRequestTransformationContext IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TRANSFORMATION; - @Override default String getActivityName() { return ACTIVITY_NAME;} + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } void onHeaderParse(); + void onPayloadParse(); void onPayloadParseSuccess(); @@ -199,13 +231,21 @@ public interface IRequestTransformationContext void onJsonPayloadParseSucceeded(); void onPayloadBytesIn(int inputSize); + void onUncompressedBytesIn(int inputSize); + void onUncompressedBytesOut(int inputSize); + void onFinalBytesOut(int outputSize); + void onTransformSuccess(); + void onTransformSkip(); + void onTransformFailure(); + void aggregateInputChunk(int sizeInBytes); + void aggregateOutputChunk(int sizeInBytes); } @@ -215,7 +255,9 @@ public interface IScheduledContext String ACTIVITY_NAME = ActivityNames.SCHEDULED; @Override - default String getActivityName() { return ACTIVITY_NAME;} + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface ITargetRequestContext @@ -223,13 +265,19 @@ public interface ITargetRequestContext IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.TARGET_TRANSACTION; - @Override default String getActivityName() { return ACTIVITY_NAME;} + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } void onBytesSent(int size); + void onBytesReceived(int size); IRequestSendingContext createHttpSendingContext(); + IWaitingForHttpResponseContext createWaitingForResponseContext(); + IReceivingHttpResponseContext createHttpReceivingContext(); } @@ -237,21 +285,33 @@ public interface IRequestSendingContext extends IAccumulationScope, IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.REQUEST_SENDING; - @Override default String getActivityName() { return ACTIVITY_NAME;} + + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface IWaitingForHttpResponseContext extends IAccumulationScope, IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.WAITING_FOR_RESPONSE; - @Override default String getActivityName() { return ACTIVITY_NAME;} + + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface IReceivingHttpResponseContext extends IAccumulationScope, IWithTypedEnclosingScope { String ACTIVITY_NAME = ActivityNames.RECEIVING_RESPONSE; - @Override default String getActivityName() { return ACTIVITY_NAME;} + + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface ITupleHandlingContext @@ -265,9 +325,13 @@ public interface ITupleHandlingContext AttributeKey HTTP_VERSION_KEY = AttributeKey.stringKey("version"); // for the span, not metric AttributeKey ENDPOINT_KEY = AttributeKey.stringKey("endpoint"); // for the span, not metric - @Override default String getActivityName() { return ACTIVITY_NAME; } + @Override + default String getActivityName() { + return 
ACTIVITY_NAME; + } void setSourceStatus(Integer sourceStatus); + void setTargetStatus(Integer targetStatus); void setMethod(String method); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java index ec5301248..843b63879 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IRootReplayerContext.java @@ -1,12 +1,12 @@ package org.opensearch.migrations.replay.tracing; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.tracing.IInstrumentConstructor; import org.opensearch.migrations.tracing.IRootOtelContext; public interface IRootReplayerContext extends IRootOtelContext, IInstrumentConstructor { ITrafficSourceContexts.IReadChunkContext createReadChunkContext(); + IReplayContexts.IChannelKeyContext createChannelContext(ISourceTrafficChannelKey tsk); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java index 235fc31b0..ee21844be 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ITrafficSourceContexts.java @@ -19,8 +19,11 @@ interface ITrafficSourceContext extends IScopedInstrumentationAttributes { } interface IReadChunkContext extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.READ_NEXT_TRAFFIC_CHUNK; + @Override - default String getActivityName() { return ACTIVITY_NAME; } + default String getActivityName() { + return ACTIVITY_NAME; + } IBackPressureBlockContext createBackPressureContext(); @@ -28,10 +31,14 @@ interface IReadChunkContext extends ITrafficSourceContext { IKafkaConsumerContexts.ICommitScopeContext createCommitContext(); } + interface IBackPressureBlockContext extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.BACK_PRESSURE_BLOCK; + @Override - default String getActivityName() { return ACTIVITY_NAME; } + default String getActivityName() { + return ACTIVITY_NAME; + } IWaitForNextSignal createWaitForSignalContext(); @@ -39,8 +46,12 @@ interface IBackPressureBlockContext extends ITrafficSourceContext { IKafkaConsumerContexts.ICommitScopeContext createCommitContext(); } + interface IWaitForNextSignal extends ITrafficSourceContext { String ACTIVITY_NAME = ActivityNames.WAIT_FOR_NEXT_BACK_PRESSURE_CHECK; - default String getActivityName() { return ACTIVITY_NAME; } + + default String getActivityName() { + return ACTIVITY_NAME; + } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index e32bede93..34916f722 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -1,21 +1,15 @@ package org.opensearch.migrations.replay.tracing; -import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongCounter; -import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.metrics.MeterProvider; -import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NonNull; import lombok.Setter; import org.apache.kafka.common.TopicPartition; -import org.checkerframework.checker.units.qual.N; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IHasRootInstrumentationScope; import org.opensearch.migrations.tracing.IInstrumentationAttributes; import java.util.Collection; @@ -29,7 +23,8 @@ public static class AsyncListeningContext @Getter @NonNull public final RootReplayerContext enclosingScope; - @Getter @Setter + @Getter + @Setter Exception observedExceptionToIncludeInMetrics; public AsyncListeningContext(@NonNull RootReplayerContext enclosingScope) { @@ -40,6 +35,7 @@ public static class MetricInstruments { public final LongCounter kafkaPartitionsRevokedCounter; public final LongCounter kafkaPartitionsAssignedCounter; public final LongUpDownCounter kafkaActivePartitionsCounter; + private MetricInstruments(Meter meter) { kafkaPartitionsRevokedCounter = meter .counterBuilder(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT).build(); @@ -87,6 +83,7 @@ private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); } } + public TouchScopeContext(@NonNull TrafficSourceContexts.BackPressureBlockContext enclosingScope) { super(enclosingScope); initializeSpan(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 110e0af43..e2769b6af 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -20,7 +20,6 @@ import java.time.Duration; import java.time.Instant; import java.util.Optional; -import java.util.stream.Stream; public abstract class ReplayContexts extends IReplayContexts { @@ -44,13 +43,14 @@ public ChannelKeyContext(RootReplayerContext rootScope, public static class MetricInstruments extends CommonScopedMetricInstruments { final LongUpDownCounter activeChannelCounter; + private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); activeChannelCounter = meter .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); } } - + public static @NonNull MetricInstruments makeMetrics(Meter meter) { return new MetricInstruments(meter, ACTIVITY_NAME); } @@ -94,6 +94,7 @@ public IChannelKeyContext getLogicalEnclosingScope() { public static class MetricInstruments extends CommonScopedMetricInstruments { final LongCounter recordCounter; final LongCounter bytesCounter; + private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); recordCounter = 
meter.counterBuilder(MetricNames.KAFKA_RECORD_READ) @@ -176,7 +177,7 @@ public ITrafficStreamKey getTrafficStreamKey() { @Override public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { var parent = getEnclosingScope(); - while(!(parent instanceof IReplayContexts.IChannelKeyContext)) { + while (!(parent instanceof IReplayContexts.IChannelKeyContext)) { parent = parent.getEnclosingScope(); } return (IReplayContexts.IChannelKeyContext) parent; @@ -184,10 +185,11 @@ public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { } public static class HttpTransactionContext - extends BaseNestedSpanContext + extends BaseNestedSpanContext implements IReplayContexts.IReplayerHttpTransactionContext { final UniqueReplayerRequestKey replayerRequestKey; - @Getter final Instant timeOfOriginalRequest; + @Getter + final Instant timeOfOriginalRequest; public HttpTransactionContext(RootReplayerContext rootScope, IReplayContexts.ITrafficStreamsLifecycleContext enclosingScope, @@ -235,6 +237,7 @@ public IReplayContexts.IRequestAccumulationContext createRequestAccumulationCont public IReplayContexts.IResponseAccumulationContext createResponseAccumulationContext() { return new ReplayContexts.ResponseAccumulationContext(this); } + @Override public TargetRequestContext createTargetRequestContext() { return new ReplayContexts.TargetRequestContext(this); @@ -262,7 +265,7 @@ public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { } public static class RequestAccumulationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestAccumulationContext { public RequestAccumulationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -285,7 +288,7 @@ private MetricInstruments(Meter meter, String activityName) { } public static class ResponseAccumulationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IResponseAccumulationContext { public ResponseAccumulationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -308,7 +311,7 @@ private MetricInstruments(Meter meter, String activityName) { } public static class RequestTransformationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestTransformationContext { public RequestTransformationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -379,54 +382,81 @@ private MetricInstruments(Meter meter, String activityName) { return getRootInstrumentationScope().transformationInstruments; } - @Override public void onHeaderParse() { + @Override + public void onHeaderParse() { meterIncrementEvent(getMetrics().headerParses); } - @Override public void onPayloadParse() { + + @Override + public void onPayloadParse() { meterIncrementEvent(getMetrics().payloadParses); } - @Override public void onPayloadParseSuccess() { + + @Override + public void onPayloadParseSuccess() { meterIncrementEvent(getMetrics().payloadSuccessParses); } - @Override public void onJsonPayloadParseRequired() { + + @Override + public void onJsonPayloadParseRequired() { meterIncrementEvent(getMetrics().jsonPayloadParses); } - @Override public void onJsonPayloadParseSucceeded() { + + @Override + public void onJsonPayloadParseSucceeded() { meterIncrementEvent(getMetrics().jsonTransformSuccess); } - @Override public void onPayloadBytesIn(int inputSize) { + + @Override + public void onPayloadBytesIn(int inputSize) { 
meterIncrementEvent(getMetrics().payloadBytesIn, inputSize); } - @Override public void onUncompressedBytesIn(int inputSize) { + + @Override + public void onUncompressedBytesIn(int inputSize) { meterIncrementEvent(getMetrics().uncompressedBytesIn, inputSize); } - @Override public void onUncompressedBytesOut(int inputSize) { + + @Override + public void onUncompressedBytesOut(int inputSize) { meterIncrementEvent(getMetrics().uncompressedBytesOut, inputSize); } - @Override public void onFinalBytesOut(int inputSize) { + + @Override + public void onFinalBytesOut(int inputSize) { meterIncrementEvent(getMetrics().finalPayloadBytesOut, inputSize); } - @Override public void onTransformSuccess() { + + @Override + public void onTransformSuccess() { meterIncrementEvent(getMetrics().transformSuccess); } - @Override public void onTransformSkip() { + + @Override + public void onTransformSkip() { meterIncrementEvent(getMetrics().transformSkipped); } - @Override public void onTransformFailure() { + + @Override + public void onTransformFailure() { meterIncrementEvent(getMetrics().transformError); } - @Override public void aggregateInputChunk(int sizeInBytes) { + + @Override + public void aggregateInputChunk(int sizeInBytes) { meterIncrementEvent(getMetrics().transformBytesIn, sizeInBytes); meterIncrementEvent(getMetrics().transformChunksIn); } - @Override public void aggregateOutputChunk(int sizeInBytes) { + + @Override + public void aggregateOutputChunk(int sizeInBytes) { meterIncrementEvent(getMetrics().transformBytesOut, sizeInBytes); meterIncrementEvent(getMetrics().transformChunksOut); } } public static class ScheduledContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IScheduledContext { private final Instant scheduledFor; @@ -438,6 +468,7 @@ public ScheduledContext(HttpTransactionContext enclosingScope, Instant scheduled public static class MetricInstruments extends CommonScopedMetricInstruments { DoubleHistogram lag; + private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); lag = meter.histogramBuilder(MetricNames.NETTY_SCHEDULE_LAG).setUnit("ms").build(); @@ -460,7 +491,7 @@ public void sendMeterEventsForEnd() { } public static class TargetRequestContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.ITargetRequestContext { public TargetRequestContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -521,7 +552,7 @@ public IReplayContexts.IWaitingForHttpResponseContext createWaitingForResponseCo } public static class RequestSendingContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestSendingContext { public RequestSendingContext(TargetRequestContext enclosingScope) { super(enclosingScope); @@ -544,7 +575,7 @@ private MetricInstruments(Meter meter, String activityName) { } public static class WaitingForHttpResponseContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IWaitingForHttpResponseContext { public WaitingForHttpResponseContext(TargetRequestContext enclosingScope) { super(enclosingScope); @@ -568,7 +599,7 @@ private MetricInstruments(Meter meter, String activityName) { } public static class ReceivingHttpResponseContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IReceivingHttpResponseContext { public ReceivingHttpResponseContext(TargetRequestContext enclosingScope) { super(enclosingScope); 
@@ -594,7 +625,7 @@ private MetricInstruments(Meter meter, String activityName) { @Getter @Setter public static class TupleHandlingContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.ITupleHandlingContext { Integer sourceStatus; Integer targetStatus; @@ -612,17 +643,10 @@ public void close() { } public static class MetricInstruments extends CommonScopedMetricInstruments { - //private final LongCounter statusMatchCounter; private final LongCounter resultCounter; -// private final LongCounter sourceStatus; -// private final LongCounter targetStatus; -// private final LongCounter methodCounter; + private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); - //statusMatchCounter = meter.counterBuilder(MetricNames.STATUS_MATCH).build(); -// sourceStatus = meter.counterBuilder("sourceStatus").build(); -// targetStatus = meter.counterBuilder("targetStatus").build(); -// methodCounter = meter.counterBuilder("method").build(); resultCounter = meter.counterBuilder("tupleResult").build(); } } @@ -655,6 +679,7 @@ public void sendMeterEventsForEnd() { /** * Convert everything in the 2xx range to 200; 300-399 to 300 + * * @param status * @return */ diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index e09f63a01..5b8671fd5 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -1,13 +1,12 @@ package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.OpenTelemetry; +import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.tracing.RootOtelContext; -import lombok.Getter; - @Getter public class RootReplayerContext extends RootOtelContext implements IRootReplayerContext { public static final String SCOPE_NAME = "replayer"; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index fbc1c3d0c..8fb8751b7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -1,11 +1,9 @@ package org.opensearch.migrations.replay.tracing; import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.metrics.MeterProvider; import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; public class TrafficSourceContexts { @@ -36,6 +34,7 @@ private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { return new MetricInstruments(meter, 
ACTIVITY_NAME); } @@ -75,6 +74,7 @@ private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { return new MetricInstruments(meter, ACTIVITY_NAME); } @@ -98,6 +98,7 @@ private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); } } + public static @NonNull MetricInstruments makeMetrics(Meter meter) { return new MetricInstruments(meter, ACTIVITY_NAME); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java index 675a3adfc..9d4ce61e7 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/BlockingTrafficSource.java @@ -5,10 +5,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.Utils; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.tracing.IKafkaConsumerContexts; -import org.opensearch.migrations.replay.tracing.IRootReplayerContext; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; import org.slf4j.event.Level; @@ -23,7 +20,6 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Function; import java.util.function.Supplier; /** diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java index bf88b7e20..845eb3c01 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/ISimpleTrafficCaptureSource.java @@ -1,9 +1,4 @@ package org.opensearch.migrations.replay.traffic.source; -import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; - -import java.util.List; -import java.util.concurrent.CompletableFuture; - public interface ISimpleTrafficCaptureSource extends ITrafficCaptureSource { } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java index 2fff58b4d..667b18201 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/traffic/source/InputStreamOfTraffic.java @@ -3,8 +3,8 @@ import lombok.Lombok; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import 
org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; +import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.tracing.ChannelContextManager; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java index 79e60e011..73be373b2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/CompressedFileTrafficCaptureSource.java @@ -6,8 +6,6 @@ import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.EOFException; import java.io.FileInputStream; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java index b6000245d..d852f67bf 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapSequentialTest.java @@ -6,7 +6,6 @@ import org.opensearch.migrations.replay.traffic.expiration.BehavioralPolicy; import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; import java.time.Duration; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java index f5b2f2142..d1681338a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExpiringTrafficStreamMapUnorderedTest.java @@ -7,7 +7,6 @@ import org.opensearch.migrations.replay.traffic.expiration.ExpiringTrafficStreamMap; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import java.nio.charset.StandardCharsets; import java.time.Duration; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java index 4ea891d70..08e8efa78 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -9,7 +9,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index 6aea2cb7e..88e2c6cde 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -1,9 +1,5 @@ package org.opensearch.migrations.replay; -import com.google.protobuf.ByteString; -import com.google.protobuf.Timestamp; -import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; -import io.opentelemetry.sdk.trace.data.SpanData; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; @@ -12,40 +8,27 @@ import org.junit.jupiter.api.Tag; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; -import org.junit.jupiter.params.provider.ValueSource; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; -import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; -import org.opensearch.migrations.trafficcapture.protos.CloseObservation; -import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; -import org.opensearch.migrations.trafficcapture.protos.ReadObservation; -import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; import org.opensearch.migrations.trafficcapture.protos.TrafficStreamUtils; -import org.opensearch.migrations.trafficcapture.protos.WriteObservation; -import org.opensearch.migrations.transform.StaticAuthTransformerFactory; import java.io.EOFException; -import java.nio.charset.StandardCharsets; import java.time.Duration; -import java.time.Instant; -import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.PriorityQueue; import java.util.Random; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; -import 
java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Collectors; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index 148f15e39..ba0d3013b 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -10,9 +10,7 @@ import org.apache.kafka.clients.producer.ProducerConfig; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; -import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.kafka.KafkaTestUtils; import org.opensearch.migrations.replay.kafka.KafkaTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index d3a89abf3..b71fabd32 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -1,10 +1,5 @@ package org.opensearch.migrations.replay; -import lombok.NonNull; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; -import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 2712474ac..164ec5573 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -15,7 +15,6 @@ import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumerTest; import org.opensearch.migrations.replay.datatypes.HttpRequestTransformationStatus; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; -import org.opensearch.migrations.replay.datatypes.PojoUniqueSourceRequestKey; import org.opensearch.migrations.replay.datatypes.TransformedPackets; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; @@ -29,7 +28,6 @@ import java.time.Instant; import java.util.ArrayList; import java.util.List; -import java.util.function.BiConsumer; import java.util.stream.Collectors; @Slf4j diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java index 7e91b29ac..6d23daca3 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SentinelSensingTrafficSource.java @@ -5,8 +5,6 @@ import org.opensearch.migrations.replay.tracing.ITrafficSourceContexts; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; -import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.io.EOFException; import java.io.IOException; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java index 82f43e31b..4d7563897 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SigV4SigningTransformationTest.java @@ -5,7 +5,6 @@ import org.junit.jupiter.api.Test; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java index 20b9658a6..f73b2b056 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/SimpleCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -10,12 +10,10 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.RawPackets; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.RootOtelContext; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TimeShifterTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TimeShifterTest.java index 9d49b4267..054f4e7ea 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TimeShifterTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TimeShifterTest.java @@ -8,8 +8,6 @@ import java.time.Instant; import java.util.Optional; -import static org.junit.jupiter.api.Assertions.*; - @WrapWithNettyLeakDetection(disableLeakChecks = true) class TimeShifterTest { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index a5697b8d8..b69dd829c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -5,7 +5,6 @@ import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; import org.opensearch.migrations.replay.traffic.source.ISimpleTrafficCaptureSource; import org.opensearch.migrations.tracing.TestContext; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 9472fc187..9f54daa34 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -6,14 +6,11 @@ import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.replay.traffic.source.InputStreamOfTraffic; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.trafficcapture.protos.CloseObservation; import org.opensearch.migrations.trafficcapture.protos.ConnectionExceptionObservation; import org.opensearch.migrations.trafficcapture.protos.EndOfMessageIndication; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java index 9cf64bac5..d05f4c010 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/V0_1TrafficCaptureSource.java @@ -3,7 +3,6 @@ import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamAndKey; import org.opensearch.migrations.replay.tracing.RootReplayerContext; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import 
org.opensearch.migrations.trafficcapture.protos.TrafficStream; import java.io.IOException; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java index eea297cb9..c88d5220b 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaKeepAliveTests.java @@ -9,10 +9,10 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; +import org.opensearch.migrations.tracing.InstrumentationTest; +import org.opensearch.migrations.tracing.TestContext; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index b5fd2aecf..81ad5d8ef 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -6,7 +6,6 @@ import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java index 5eec9a7a1..e2b13fdca 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceTest.java @@ -10,11 +10,10 @@ import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import org.opensearch.migrations.replay.tracing.ReplayContexts; -import org.opensearch.migrations.tracing.InstrumentationTest; -import org.opensearch.migrations.tracing.TestContext; import org.opensearch.migrations.replay.tracing.ChannelContextManager; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.traffic.source.ITrafficStreamWithKey; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.trafficcapture.protos.ReadObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficObservation; import org.opensearch.migrations.trafficcapture.protos.TrafficStream; diff 
--git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index d4e062cc4..ffd504ba8 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -8,7 +8,6 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.tracing.TestContext; import java.time.Duration; @@ -66,21 +65,21 @@ private void checkMetrics(List recordedMetrics) { private void checkSpans(List recordedSpans) { var byName = recordedSpans.stream().collect(Collectors.groupingBy(SpanData::getName)); - var keys = Arrays.stream(IReplayContexts.ActivityNames.class.getFields()).map(f-> { - try { - return f.get(null); - } catch (Exception e) { - Lombok.sneakyThrow(e); - return null; - } - }).toArray(String[]::new); + var keys = Arrays.stream(IReplayContexts.ActivityNames.class.getFields()).map(f -> { + try { + return f.get(null); + } catch (Exception e) { + Lombok.sneakyThrow(e); + return null; + } + }).toArray(String[]::new); Stream.of(keys).forEach(spanName -> { - Assertions.assertNotNull(byName.get(spanName)); + Assertions.assertNotNull(byName.get(spanName)); Assertions.assertEquals(1, byName.get(spanName).size()); byName.remove(spanName); }); Assertions.assertEquals("", byName.entrySet().stream() - .map(kvp->kvp.getKey()+":"+kvp.getValue()).collect(Collectors.joining())); + .map(kvp -> kvp.getKey() + ":" + kvp.getValue()).collect(Collectors.joining())); } } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java index 85aa9a065..a1f0f686d 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java @@ -2,13 +2,14 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; -import org.opensearch.migrations.tracing.TestContext; public class InstrumentationTest { protected TestContext rootContext; - protected TestContext makeContext() { return TestContext.noOtelTracking(); } + protected TestContext makeContext() { + return TestContext.noOtelTracking(); + } @BeforeEach protected void initializeContext() { diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index 1b8a59e1b..145eaf57e 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -71,6 +71,7 @@ public void close() { 0, replayerIdx); return rk.trafficStreamKey.getTrafficStreamsContext().createHttpTransactionContext(rk, Instant.EPOCH); } + public 
IReplayContexts.ITupleHandlingContext getTestTupleContext() { return getTestTupleContext(DEFAULT_TEST_CONNECTION, 1); From bf8ea86ff4b9cb0adcf011082e1bc30dd76f310c Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 22 Jan 2024 23:48:54 -0500 Subject: [PATCH 72/94] Addressing PR Feedback with some localized cleanups Signed-off-by: Greg Schohn --- TrafficCapture/coreUtilities/build.gradle | 2 +- .../tracing/IInstrumentationAttributes.java | 25 ++++++++------ .../migrations/tracing/RootOtelContext.java | 34 ++++++++++++++----- .../proxyserver/CaptureProxy.java | 2 +- .../migrations/replay/TrafficReplayer.java | 2 +- 5 files changed, 43 insertions(+), 22 deletions(-) diff --git a/TrafficCapture/coreUtilities/build.gradle b/TrafficCapture/coreUtilities/build.gradle index 92acdc556..17ad01040 100644 --- a/TrafficCapture/coreUtilities/build.gradle +++ b/TrafficCapture/coreUtilities/build.gradle @@ -63,7 +63,7 @@ dependencies { api group: 'io.opentelemetry', name: 'opentelemetry-sdk' implementation group: 'io.opentelemetry', name: 'opentelemetry-exporter-otlp' implementation group: 'io.opentelemetry.instrumentation', name: 'opentelemetry-log4j-appender-2.17', version: '1.30.0-alpha' - implementation group: 'io.opentelemetry', name: 'opentelemetry-semconv', version: '1.30.0-alpha' + implementation group: 'io.opentelemetry.semconv', name: 'opentelemetry-semconv', version: '1.23.1-alpha' // OpenTelemetry log4j appender implementation("io.opentelemetry.instrumentation:opentelemetry-log4j-appender-2.17:1.30.0-alpha") diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 999f02835..0f1397fd5 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -9,9 +9,16 @@ import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; import lombok.NonNull; +import org.opensearch.migrations.Utils; import java.time.Duration; +import java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; public interface IInstrumentationAttributes { AttributeKey HAD_EXCEPTION_KEY = AttributeKey.booleanKey("hadException"); @@ -32,25 +39,21 @@ default AttributesBuilder fillAttributes(AttributesBuilder builder) { } default Attributes getPopulatedSpanAttributes() { - return getPopulatedSpanAttributes(Attributes.builder()); + return getPopulatedSpanAttributesBuilder().build(); } - default Attributes getPopulatedSpanAttributes(AttributesBuilder builder) { - return getPopulatedSpanAttributesBuilder(builder).build(); - } - - default AttributesBuilder getPopulatedSpanAttributesBuilder(AttributesBuilder builder) { + default AttributesBuilder getPopulatedSpanAttributesBuilder() { + var builder = Attributes.builder(); var currentObj = this; - var stack = new ArrayList(); + var stack = new ArrayDeque(); while (currentObj != null) { stack.add(currentObj); currentObj = currentObj.getEnclosingScope(); } // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts - for (int i=stack.size()-1; i>=0; --i) { - builder = 
stack.get(i).fillAttributes(builder); - } - return builder; + return StreamSupport.stream( + Spliterators.spliteratorUnknownSize(stack.descendingIterator(), Spliterator.ORDERED), false) + .collect(Utils.foldLeft(builder, (b, iia)->iia.fillAttributes(b))); } default void meterIncrementEvent(LongCounter c) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index a5695024c..92a2edd33 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -18,10 +18,11 @@ import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; -import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import io.opentelemetry.semconv.ResourceAttributes; import lombok.Getter; import lombok.NonNull; import lombok.Setter; +import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.Utils; import java.time.Duration; @@ -29,11 +30,11 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Stream; +@Slf4j public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; private final String scopeName; @Getter - @Setter Exception observedExceptionToIncludeInMetrics; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @@ -46,7 +47,6 @@ public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String .setEndpoint(collectorEndpoint) .setTimeout(2, TimeUnit.SECONDS) .build()) - .setScheduleDelay(100, TimeUnit.MILLISECONDS) .build(); final var metricReader = PeriodicMetricReader.builder(OtlpGrpcMetricExporter.builder() .setEndpoint(collectorEndpoint) @@ -75,23 +75,40 @@ public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String return openTelemetrySdk; } - public static OpenTelemetry initializeOpenTelemetry(String collectorEndpoint, String serviceName) { + public static OpenTelemetry initializeNoopOpenTelemetry() { + return OpenTelemetrySdk.builder().build(); + } + + /** + * Initialize the Otel SDK for a collector if collectorEndpoint != null or setup an empty, + * do-nothing SDK when it is null. 
+ * @param collectorEndpoint - URL of the otel-collector + * @param serviceName - name of this service that is sending data to the collector + * @return a fully initialize OpenTelemetry object capable of producing MeterProviders and TraceProviders + */ + public static OpenTelemetry + initializeOpenTelemetryWithCollectorOrAsNoop(String collectorEndpoint, String serviceName) { return Optional.ofNullable(collectorEndpoint) .map(endpoint -> initializeOpenTelemetryForCollector(endpoint, serviceName)) - .orElse(OpenTelemetrySdk.builder().build()); + .orElseGet(() -> { + if (serviceName != null) { + log.atWarn().setMessage("Collector endpoint=null, so serviceName parameter '" + serviceName + + "' is being ignored since a no-op OpenTelemetry object is being created").log(); + } + return initializeNoopOpenTelemetry(); + }); } - public RootOtelContext(String scopeName) { this(scopeName, null); } public RootOtelContext(String scopeName, String collectorEndpoint, String serviceName) { - this(scopeName, initializeOpenTelemetry(collectorEndpoint, serviceName)); + this(scopeName, initializeOpenTelemetryWithCollectorOrAsNoop(collectorEndpoint, serviceName)); } public RootOtelContext(String scopeName, OpenTelemetry sdk) { - openTelemetryImpl = sdk != null ? sdk : initializeOpenTelemetry(null, null); + openTelemetryImpl = sdk != null ? sdk : initializeOpenTelemetryWithCollectorOrAsNoop(null, null); this.scopeName = scopeName; } @@ -111,6 +128,7 @@ public MeterProvider getMeterProvider() { @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { + assert observedExceptionToIncludeInMetrics == null; return builder; // nothing more to do } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index dd7da0c23..a1da3b95e 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -306,7 +306,7 @@ public static void main(String[] args) throws InterruptedException, IOException var backsideUri = convertStringToUri(params.backsideUriString); var rootContext = new RootCaptureContext( - RootOtelContext.initializeOpenTelemetry(params.otelCollectorEndpoint, "capture")); + RootOtelContext.initializeOpenTelemetryWithCollectorOrAsNoop(params.otelCollectorEndpoint, "capture")); var sksOp = Optional.ofNullable(params.sslConfigFilePath) .map(sslConfigFile->new DefaultSecurityKeyStore(getSettings(sslConfigFile), diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java index 1074c9c10..564c649de 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/TrafficReplayer.java @@ -386,7 +386,7 @@ public static void main(String[] args) System.exit(3); return; } - var topContext = new RootReplayerContext(RootOtelContext.initializeOpenTelemetry(params.otelCollectorEndpoint, + var topContext = new RootReplayerContext(RootOtelContext.initializeOpenTelemetryWithCollectorOrAsNoop(params.otelCollectorEndpoint, 
"replay")); try (var blockingTrafficSource = TrafficCaptureSourceFactory.createTrafficCaptureSource(topContext, params, Duration.ofSeconds(params.lookaheadTimeSeconds)); From b641c5ae331e76516e971834b98d9b6932c33159 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 24 Jan 2024 09:19:40 -0500 Subject: [PATCH 73/94] aws cli wasn't functional within my arm64 container because the dockerfile was hardcoded for x86_64. Install awscli via pip now, which should work on both environments. Signed-off-by: Greg Schohn --- .../src/main/docker/migrationConsole/Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile index 69186c587..44fa2a848 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile @@ -5,8 +5,9 @@ ENV DEBIAN_FRONTEND noninteractive RUN apt-get update && \ apt-get install -y --no-install-recommends python3.9 python3-pip python3-dev openjdk-11-jre-headless wget gcc libc-dev git curl vim jq unzip less && \ pip3 install urllib3==1.25.11 opensearch-benchmark==1.1.0 awscurl tqdm -# TODO upon the next release of opensearch-benchmark the awscli package should be installed by pip3, with the expected boto3 version upgrade resolving the current conflicts between opensearch-benchmark and awscli -RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && unzip awscliv2.zip && ./aws/install && rm -rf aws awscliv2.zip +# TODO upon the next release of opensearch-benchmark the awscli package should be installed by pip3, \ +# with the expected boto3 version upgrade resolving the current conflicts between opensearch-benchmark and awscli +RUN pip install awscli RUN mkdir /root/kafka-tools RUN mkdir /root/kafka-tools/aws COPY runTestBenchmarks.sh /root/ From cd34b322acdc079d4fe516e7519e913bfc150644 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 26 Jan 2024 12:54:31 -0500 Subject: [PATCH 74/94] Rework otel-collector container packaging. Support 3 different otel-collector container configurations for docker-compose. * Prometheus + Jaeger + OpenSearch (for metrics, traces, and 'analytics/logs') - all of which are local * AWS CloudWatch + AWS X-Ray (using a mounted credentials file if present, or reliant how well the base image can resolve credentials) + OpenSearch (local) * All of the above `./gradlew :dockerSolution:composeUp` will launch the first configuration by default. Passing the flag `-Potel-collector=otel-aws.yml` will use the AWS configuration and `-Potel-collector=otel-everything.yml` will use the 'everything' configuration. The otel-collector container itself still uses the custom-built collector from mikaylathompson as the base image. However, the extended image normalizes the user and entrypoint so that further extensions and applications of the container can behave as if the base image was the AWS Distro for OpenTelemetry collector base image (amazon/aws-otel-collector, see https://aws-otel.github.io/docs/setup/docker-images). That also includes being able to convert the credentials file into environment variables since the otel-collector was otherwise struggling to use the credentials file directly. The otel-collector configuration is contained within one yaml file and the code within otelConfigs now allows one to fully materialize an otel-collector config file. 
The otel-config*.yaml files within the dockerSolution have been created via the makeConfigFiles.sh script. Signed-off-by: Greg Schohn --- TrafficCapture/dockerSolution/README.md | 52 ++++++++++- TrafficCapture/dockerSolution/build.gradle | 15 ++-- .../dockerSolution/otelConfigs/README.md | 31 +++++++ .../configSnippets/awsCloudWatch.yaml | 8 ++ .../otelConfigs/configSnippets/awsXRay.yaml | 7 ++ .../otelConfigs/configSnippets/base.yaml | 14 +++ .../configSnippets/basicAuthClient.yaml | 5 ++ .../otelConfigs/configSnippets/batch.yaml | 5 ++ .../configSnippets/batchMetrics.yaml | 4 + .../configSnippets/batchTraces.yaml | 4 + .../configSnippets/debugDetailed.yaml | 5 ++ .../otelConfigs/configSnippets/debugInfo.yaml | 5 ++ .../configSnippets/debugLogsDetailed.yaml | 4 + .../configSnippets/debugLogsInfo.yaml | 4 + .../configSnippets/debugMetricsDetailed.yaml | 4 + .../configSnippets/debugMetricsInfo.yaml | 4 + .../configSnippets/debugTracesDetailed.yaml | 4 + .../configSnippets/debugTracesInfo.yaml | 4 + .../configSnippets/healthCheck.yaml | 5 ++ .../otelConfigs/configSnippets/jaeger.yaml | 10 +++ .../otelConfigs/configSnippets/logs.yaml | 4 + .../otelConfigs/configSnippets/metrics.yaml | 4 + .../configSnippets/openSearch.yaml | 39 ++++++++ .../configSnippets/openSearchAws.yaml | 4 + .../configSnippets/openSearchLocal.yaml | 11 +++ .../otelConfigs/configSnippets/pprof.yaml | 6 ++ .../configSnippets/prometheus.yaml | 11 +++ .../otelConfigs/configSnippets/traces.yaml | 4 + .../otelConfigs/configSnippets/zpages.yaml | 6 ++ .../otelConfigs/consConfigSnippets.py | 47 ++++++++++ .../otelConfigs/dependencies.yml | 29 ++++++ .../otelConfigs/makeConfigFiles.sh | 20 +++++ .../otelConfigs/requirements.txt | 1 + .../otel-config-everything-compose.yaml | 90 +++++++++++++++++++ .../configs/otel-config-everything.yaml | 87 ++++++++++++++++++ ...l-config-prometheus-jaeger-opensearch.yaml | 84 +++++++++++++++++ .../docker/composeExtensions/otel-aws.yml | 20 +++++ .../composeExtensions/otel-everything.yml | 23 +++++ .../otel-prometheus-jaeger-opensearch.yml | 19 ++++ .../proxy-multi.yml} | 0 .../proxy-single.yml} | 0 .../src/main/docker/docker-compose.yml | 23 +---- .../main/docker/migrationConsole/Dockerfile | 5 +- .../main/docker/otel-collector-config.yaml | 43 --------- .../src/main/docker/otelCollector/Dockerfile | 14 +++ .../{otelcol => otelCollector}/README.md | 0 .../docker/otelCollector/exportCredsAndRun.sh | 17 ++++ .../otel-config-aws-debug.yaml} | 44 +++++---- .../otel-config-aws.yaml} | 26 +++--- .../otelCollector/otel-config-debug-only.yaml | 25 ++++++ .../src/main/docker/otelcol/Dockerfile | 4 - 51 files changed, 792 insertions(+), 112 deletions(-) create mode 100644 TrafficCapture/dockerSolution/otelConfigs/README.md create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsCloudWatch.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/base.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/basicAuthClient.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/batch.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchMetrics.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchTraces.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugDetailed.yaml create mode 100644 
TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugInfo.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsDetailed.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsInfo.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsDetailed.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsInfo.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesDetailed.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesInfo.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/healthCheck.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/jaeger.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearch.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchAws.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchLocal.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/pprof.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/prometheus.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/configSnippets/zpages.yaml create mode 100644 TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py create mode 100644 TrafficCapture/dockerSolution/otelConfigs/dependencies.yml create mode 100755 TrafficCapture/dockerSolution/otelConfigs/makeConfigFiles.sh create mode 100644 TrafficCapture/dockerSolution/otelConfigs/requirements.txt create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-prometheus-jaeger-opensearch.yml rename TrafficCapture/dockerSolution/src/main/docker/{docker-compose-multi.yml => composeExtensions/proxy-multi.yml} (100%) rename TrafficCapture/dockerSolution/src/main/docker/{docker-compose-single.yml => composeExtensions/proxy-single.yml} (100%) delete mode 100644 TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/otelCollector/Dockerfile rename TrafficCapture/dockerSolution/src/main/docker/{otelcol => otelCollector}/README.md (100%) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/otelCollector/exportCredsAndRun.sh rename TrafficCapture/dockerSolution/src/main/docker/{otelcol/otel-config.yml => otelCollector/otel-config-aws-debug.yaml} (74%) rename TrafficCapture/dockerSolution/src/main/docker/{otelcol/otel-config-cdk.yml => 
otelCollector/otel-config-aws.yaml} (81%) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml delete mode 100644 TrafficCapture/dockerSolution/src/main/docker/otelcol/Dockerfile diff --git a/TrafficCapture/dockerSolution/README.md b/TrafficCapture/dockerSolution/README.md index 6d2dc1967..c0682cbe4 100644 --- a/TrafficCapture/dockerSolution/README.md +++ b/TrafficCapture/dockerSolution/README.md @@ -8,13 +8,61 @@ down again. Notice that most of the Dockerfiles are dynamically constructed in the build hierarchy. Some efforts have been made to ensure that changes will make it into containers to be launched. -### Running the Docker Solution +## Running the Docker Solution While in the TrafficCapture directory, run the following command: `./gradlew :dockerSolution:composeUp` -### Compatibility +### Running with different telemetry flavors. + +By default, composeUp will run an otel-collector that exports instrumentation to other local containers within the +migrations network. However, the collector's export configuration can be overridden via the otel-collector property: +`TrafficCapture % ./gradlew :dockerSolution:composeUp -Potel-collector=otel-aws.yml` + +The [otel-aws.yml](src/main/docker/composeExtensions/otel-aws.yml) will use that docker-compose extension. +That extension uses the collector configurations (from the container's base image) and wires the ~/.aws/credentials +file into the container to provide the collector credentials that it needs to push metrics and traces to CloudWatch +and X-Ray. In addition to the [default configuration](src/main/docker/composeExtensions/otel-prometheus-jaeger-opensearch.yml) +to use local containers, a [third option](src/main/docker/composeExtensions/otel-everything.yml) will use _BOTH_ local +containers AND the AWS services. + +## Maintaining configurations + +### Otel-Collector configurations + +The migrations components use OpenTelemetry for instrumentation so that different systems can be utilized acrpss +(Prometheus, CloudWatch, Zipkin, etc) and across different types of infrastructure. The docker solutions vended +in this directory try to provide flexibility and consistency between different environments. Base images may +change in the future as otel matures and configurations will also need to be updated over time due to external +changes (e.g. debug/logging exporter) or internal ones (buffering parameters). To manage the 5 starting +configurations that we produce for one purpose or another, support code is within [otelConfigs](otelConfigs). + +The otel-collector configuration is contained within one yaml file. That single file configures the collector +to interface with many different systems. Maintaining consistency of configurations even as they're +copy-pasted between each other isn't scalable. Complicating matters more is that the base +otel-collector from otel and the AWS distro both lack a posix base system. That makes cons-ing any +configurations within the container challenging. The compromise struck here is to do the construction of +configuration files as a preprocessing step BEFORE docker builds. That preprocessing logic is within +dockerSolution/otelConfigs. + +A python script creates individual (but complete) otel-collector configurations +([consConfigSnippets.py](otelConfigs/consConfigSnippets.py)). 
+A shell script ([makeConfigFiles.sh](otelConfigs/makeConfigFiles.sh)) runs 5 configuration sets +to output the otel-config-*.yaml files that are used by the +[otelCollector image](src/main/docker/otelCollector/Dockerfile) and the +[compose configurations](src/main/docker/composeExtensions/). The compose configurations override +the original otel collector configurations with new ones. Those compose files also vary by specifying +different ports depending upon which services have been configured (Prometheus, zpages, etc ports). + +Those configurations are created by merging a set of YAML snippets into a final file. Within the +dependencies.yml file, parents (and their respective snippets) are dependencies of their children. +As consConfigSnippets.py is invoked, all ancestors' snippets are included before the children that +were specified. To further simplify management of dependencies, snippets may have multiple dependencies. +Upon hitting each dependency for the first time, all of its dependencies are also found and included +within the final yaml configuration that is being output. + +## Compatibility The tools in this directory can only be built if you have Java version 11 installed. diff --git a/TrafficCapture/dockerSolution/build.gradle b/TrafficCapture/dockerSolution/build.gradle index 5d6258042..f65a60d20 100644 --- a/TrafficCapture/dockerSolution/build.gradle +++ b/TrafficCapture/dockerSolution/build.gradle @@ -5,9 +5,7 @@ plugins { } import org.opensearch.migrations.common.CommonUtils -import java.security.MessageDigest import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage -import org.apache.tools.ant.taskdefs.condition.Os def calculateDockerHash = { projectName -> CommonUtils.calculateDockerHash(projectName, project) @@ -22,6 +20,7 @@ def dockerFilesForExternalServices = [ "elasticsearchWithSearchGuard": "elasticsearch_searchguard", "migrationConsole": "migration_console", "opensearchDashboards": "opensearch_dashboards", + "otelCollector": "otel_collector" ] // Create the static docker files that aren't hosting migrations java code from this repo dockerFilesForExternalServices.each { projectName, dockerImageName -> @@ -59,15 +58,21 @@ javaContainerServices.forEach { projectName, dockerImageName -> } dockerCompose { - useComposeFiles = project.hasProperty('multiProxy') ? - ['src/main/docker/docker-compose.yml', 'src/main/docker/docker-compose-multi.yml'] : - ['src/main/docker/docker-compose.yml', 'src/main/docker/docker-compose-single.yml'] + def extensionsDir = "src/main/docker/composeExtensions/" + useComposeFiles = + ['src/main/docker/docker-compose.yml', + "${extensionsDir}/" + (project.hasProperty("otel-collector") ? + "${project.getProperty('otel-collector')}" : + "otel-prometheus-jaeger-opensearch.yml"), + "${extensionsDir}" + (project.hasProperty("multiProxy") ? 
"proxy-multi.yml" : "proxy-single.yml") + ] } task buildDockerImages { dependsOn buildDockerImage_elasticsearchWithSearchGuard dependsOn buildDockerImage_migrationConsole dependsOn buildDockerImage_opensearchDashboards + dependsOn buildDockerImage_otelCollector dependsOn buildDockerImage_trafficCaptureProxyServer dependsOn buildDockerImage_trafficReplayer diff --git a/TrafficCapture/dockerSolution/otelConfigs/README.md b/TrafficCapture/dockerSolution/otelConfigs/README.md new file mode 100644 index 000000000..401432f88 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/README.md @@ -0,0 +1,31 @@ +# PRE-REQUISITE + +* Python3 and venv + +## Activate your Python virtual environment + +To isolate the Python environment for the project from your local machine, create virtual environment like so: +``` +python3 -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +``` + +You can exit the Python virtual environment and remove its resources like so: +``` +deactivate +rm -rf .venv +``` + +Learn more about venv [here](https://docs.python.org/3/library/venv.html). + +## Create otel-collector config files + +Run `consConfigSnippets.py` with the snippet components (without the .yaml extension) +that you want to include in the output (stdout). Common dependencies (as determined +by dependencies.yaml) will only be included in the final output once. + +For example +``` +python3 consConfigSnippets.py base +``` \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsCloudWatch.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsCloudWatch.yaml new file mode 100644 index 000000000..6f6b8c657 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsCloudWatch.yaml @@ -0,0 +1,8 @@ +exporters: + awsemf: + namespace: 'TrafficCaptureReplay' + +service: + pipelines: + metrics: + exporters: [ awsemf ] diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml new file mode 100644 index 000000000..a0fb4a1c2 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml @@ -0,0 +1,7 @@ +exporters: + awsxray: + +service: + pipelines: + traces: + exporters: [ awsxray ] diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/base.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/base.yaml new file mode 100644 index 000000000..20b605dfe --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/base.yaml @@ -0,0 +1,14 @@ +receivers: + otlp: + protocols: + grpc: + +processors: + +extensions: + +exporters: + +service: + extensions: + pipelines: diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/basicAuthClient.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/basicAuthClient.yaml new file mode 100644 index 000000000..4e81c6d90 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/basicAuthClient.yaml @@ -0,0 +1,5 @@ +extensions: + basicauth/client: + client_auth: + username: "admin" + password: "admin" diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batch.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batch.yaml new file mode 100644 index 000000000..c5b44dc1e --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batch.yaml @@ -0,0 +1,5 @@ +processors: + batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 \ 
No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchMetrics.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchMetrics.yaml new file mode 100644 index 000000000..c755351ad --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchMetrics.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + metrics: + processors: [ batch ] diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchTraces.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchTraces.yaml new file mode 100644 index 000000000..a32aff59d --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/batchTraces.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + traces: + processors: [ batch ] diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugDetailed.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugDetailed.yaml new file mode 100644 index 000000000..1568fb276 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugDetailed.yaml @@ -0,0 +1,5 @@ +exporters: + logging: + verbosity: detailed + sampling_initial: 5 + sampling_thereafter: 200 \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugInfo.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugInfo.yaml new file mode 100644 index 000000000..760aabf66 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugInfo.yaml @@ -0,0 +1,5 @@ +exporters: + logging: + verbosity: info + sampling_initial: 5 + sampling_thereafter: 200 \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsDetailed.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsDetailed.yaml new file mode 100644 index 000000000..04c30b4a9 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsDetailed.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + logs: + exporters: [logging] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsInfo.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsInfo.yaml new file mode 100644 index 000000000..04c30b4a9 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugLogsInfo.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + logs: + exporters: [logging] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsDetailed.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsDetailed.yaml new file mode 100644 index 000000000..d7b4f497c --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsDetailed.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + metrics: + exporters: [logging] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsInfo.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsInfo.yaml new file mode 100644 index 000000000..d7b4f497c --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugMetricsInfo.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + metrics: + exporters: [logging] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesDetailed.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesDetailed.yaml new file mode 100644 
index 000000000..c1e7f9541 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesDetailed.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + traces: + exporters: [logging] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesInfo.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesInfo.yaml new file mode 100644 index 000000000..c1e7f9541 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/debugTracesInfo.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + traces: + exporters: [logging] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/healthCheck.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/healthCheck.yaml new file mode 100644 index 000000000..5e20967f1 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/healthCheck.yaml @@ -0,0 +1,5 @@ +extensions: + health_check: + +service: + extensions: [ health_check ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/jaeger.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/jaeger.yaml new file mode 100644 index 000000000..127d7d56e --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/jaeger.yaml @@ -0,0 +1,10 @@ +exporters: + otlp/jaeger: # Jaeger supports OTLP directly. The default port for OTLP/gRPC is 4317 + endpoint: jaeger:4317 + tls: + insecure: true + +service: + pipelines: + traces: + exporters: [ otlp/jaeger ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml new file mode 100644 index 000000000..5dfea614a --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + logs: + receivers: [ otlp ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml new file mode 100644 index 000000000..cc7b29457 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + metrics: + receivers: [ otlp ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearch.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearch.yaml new file mode 100644 index 000000000..60a70e82e --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearch.yaml @@ -0,0 +1,39 @@ +processors: + attributes: + # This processor is currently renaming two attributes + # that are prefixed with `log4j.context_data.` to the base attribute name + # to make queries within OpenSearch clearer. Both the `insert from_attribute` + # and the `delete` actions will fail silently if the attribute is not present, + # which means that these are safe for events that both do and don't have these + # attributes. This pattern should be extended to all of our standard attributes. 
+ actions: + - key: event + from_attribute: log4j.context_data.event + action: insert + - key: log4j.context_data.event + action: delete + - key: channel_id + from_attribute: log4j.context_data.channel_id + action: insert + - key: log4j.context_data.channel_id + action: delete + # The following actions convert various should-be-int strings to ints + - key: log4j.context_data.source_http_status + action: convert + converted_type: int + - key: log4j.context_data.target_http_status + action: convert + converted_type: int + - key: log4j.context_data.http_status_match + action: convert + converted_type: int + +exporters: + opensearch: + namespace: migrations + +service: + pipelines: + logs: + processors: [ attributes ] + exporters: [ opensearch ] diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchAws.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchAws.yaml new file mode 100644 index 000000000..21e6b60de --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchAws.yaml @@ -0,0 +1,4 @@ +exporters: + opensearch: + http: + endpoint: "${ANALYTICS_DOMAIN_ENDPOINT}" \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchLocal.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchLocal.yaml new file mode 100644 index 000000000..ef1e17432 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/openSearchLocal.yaml @@ -0,0 +1,11 @@ +exporters: + opensearch: + http: + endpoint: "https://opensearchanalytics:9200" + auth: + authenticator: basicauth/client + tls: + insecure_skip_verify: true + +service: + extensions: [ basicauth/client ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/pprof.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/pprof.yaml new file mode 100644 index 000000000..f14fe1283 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/pprof.yaml @@ -0,0 +1,6 @@ +extensions: + pprof: + endpoint: :1888 + +service: + extensions: [ pprof ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/prometheus.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/prometheus.yaml new file mode 100644 index 000000000..a2d885d64 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/prometheus.yaml @@ -0,0 +1,11 @@ +exporters: + prometheus: + endpoint: "0.0.0.0:8889" + send_timestamps: true + metric_expiration: 5m + enable_open_metrics: true + +service: + pipelines: + metrics: + exporters: [ prometheus ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml new file mode 100644 index 000000000..8c7e89e60 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml @@ -0,0 +1,4 @@ +service: + pipelines: + traces: + receivers: [ otlp ] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/zpages.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/zpages.yaml new file mode 100644 index 000000000..2f7d48dc0 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/zpages.yaml @@ -0,0 +1,6 @@ +extensions: + zpages: + endpoint: :55679 + +service: + extensions: [ zpages] \ No newline at end of file diff --git 
a/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py b/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py new file mode 100644 index 000000000..c7ce0fd2f --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py @@ -0,0 +1,47 @@ +import sys +import subprocess +import yaml + +def collect_snippet_dependencies(key, original_dict, + depth, + ground_truth_dict, already_collected_set, + found_at_depth_map): + + already_collected_set.add(key) + if ground_truth_dict is None: + return False + found_match = False + for parent, innerMap in ground_truth_dict.items(): + if parent == key or collect_snippet_dependencies(key, original_dict, depth + 1, + innerMap, already_collected_set, + found_at_depth_map): + if parent not in already_collected_set: + collect_snippet_dependencies(parent, original_dict, 0, + original_dict, already_collected_set, found_at_depth_map) + found_at_depth_map[parent]=depth + found_match = True + return found_match + +def construct_command(selected_keys, deps): + dependency_depth_dict = dict() + for key in selected_keys: + foundKey = collect_snippet_dependencies(key, deps, 0, deps, set(), dependency_depth_dict) + assert foundKey, f"key={key}" + ordered_snippets = sorted(dependency_depth_dict, key=lambda k: dependency_depth_dict[k]) + + files = ' '.join([f"configSnippets/{dep}.yaml" for dep in ordered_snippets]) + return f"yq eval-all '. as $item ireduce ({{}}; . *+ $item )' {files}" + +def run_command(command): + subprocess.run(command, shell=True, text=True) + +def main(selected_keys): + with open('dependencies.yml', 'r') as file: + deps = yaml.safe_load(file) + + command = construct_command(selected_keys, deps) + run_command(command) + +if __name__ == "__main__": + args = sys.argv[1:] # Arguments from command line + main(args) diff --git a/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml b/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml new file mode 100644 index 000000000..4cd9c4873 --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml @@ -0,0 +1,29 @@ +base: + metrics: + batchMetrics: + prometheus: + awsCloudWatch: + traces: + batchTraces: + awsXRay: + jaeger: + basicAuthClient: + openSearchLocal: + batch: + batchMetrics: + batchTraces: + debugDetailed: + debugMetricsDetailed: + debugTracesDetailed: + debugLogsDetailed: + debugInfo: + debugMetricsInfo: + debugTracesInfo: + debugLogsInfo: + healthCheck: + logs: + openSearch: + openSearchAws: + openSearchLocal: + pprof: + zpages: \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/makeConfigFiles.sh b/TrafficCapture/dockerSolution/otelConfigs/makeConfigFiles.sh new file mode 100755 index 000000000..c6b96f61e --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/makeConfigFiles.sh @@ -0,0 +1,20 @@ +#!/bin/sh + +python3 -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt + +DOCKER_IMAGE_BUILTINS=../src/main/docker/otelCollector + +python3 consConfigSnippets.py awsCloudWatch awsXRay openSearchAws healthCheck > "${DOCKER_IMAGE_BUILTINS}/otel-config-aws.yaml" +python3 consConfigSnippets.py awsCloudWatch awsXRay openSearchAws healthCheck debugMetricsDetailed debugTracesDetailed debugLogsDetailed > "${DOCKER_IMAGE_BUILTINS}/otel-config-aws-debug.yaml" +python3 consConfigSnippets.py zpages pprof healthCheck debugTracesDetailed debugMetricsDetailed debugTracesDetailed debugLogsDetailed > "${DOCKER_IMAGE_BUILTINS}/otel-config-debug-only.yaml" + 
+COMPOSE_EXTENSIONS=../src/main/docker/composeExtensions/configs +python3 consConfigSnippets.py awsCloudWatch awsXRay prometheus jaeger openSearchLocal zpages pprof healthCheck debugMetricsDetailed debugTracesDetailed debugLogsDetailed > "${COMPOSE_EXTENSIONS}/otel-config-everything.yaml" +python3 consConfigSnippets.py prometheus jaeger openSearchLocal zpages pprof healthCheck debugMetricsDetailed debugTracesDetailed debugLogsDetailed > "${COMPOSE_EXTENSIONS}/otel-config-prometheus-jaeger-opensearch.yaml" + +echo Done making files + +deactivate +rm -rf .venv \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/requirements.txt b/TrafficCapture/dockerSolution/otelConfigs/requirements.txt new file mode 100644 index 000000000..dbfc7099c --- /dev/null +++ b/TrafficCapture/dockerSolution/otelConfigs/requirements.txt @@ -0,0 +1 @@ +PyYAML \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml new file mode 100644 index 000000000..654c432c1 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml @@ -0,0 +1,90 @@ +receivers: + otlp: + protocols: + grpc: + +processors: + batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 + attributes: + # This processor is currently renaming two attributes + # that are prefixed with `log4j.context_data.` to the base attribute name + # to make queries within OpenSearch clearer. Both the `insert from_attribute` + # and the `delete` actions will fail silently if the attribute is not present, + # which means that these are safe for events that both do and don't have these + # attributes. This pattern should be extended to all of our standard attributes. + actions: + - key: event + from_attribute: log4j.context_data.event + action: insert + - key: log4j.context_data.event + action: delete + - key: channel_id + from_attribute: log4j.context_data.channel_id + action: insert + - key: log4j.context_data.channel_id + action: delete + # The following actions convert various should-be-int strings to ints + - key: log4j.context_data.source_http_status + action: convert + converted_type: int + - key: log4j.context_data.target_http_status + action: convert + converted_type: int + - key: log4j.context_data.http_status_match + action: convert + converted_type: int + +extensions: + basicauth/client: + client_auth: + username: "admin" + password: "admin" + health_check: + zpages: + endpoint: :55679 + +exporters: + logging: + verbosity: debug + + awsemf: + namespace: 'TrafficCaptureReplay' + prometheus: + endpoint: "0.0.0.0:8889" + send_timestamps: true + metric_expiration: 5m + enable_open_metrics: true + + awsxray: + otlp/jaeger: # Jaeger supports OTLP directly. 
The default port for OTLP/gRPC is 4317 + endpoint: jaeger:4317 + tls: + insecure: true + + opensearch: + namespace: migrations + http: + endpoint: "https://opensearchanalytics:9200" + auth: + authenticator: basicauth/client + tls: + insecure_skip_verify: true + +service: + extensions: [ zpages, health_check, basicauth/client ] + pipelines: + traces: + receivers: [ otlp ] + processors: [ batch ] + exporters: [ otlp/jaeger, awsxray ] + metrics: + receivers: [ otlp ] + processors: [ batch ] + exporters: [ logging, prometheus, awsemf ] + logs: + receivers: [otlp] + processors: [attributes] + exporters: [logging, debug, opensearch] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml new file mode 100644 index 000000000..9e896d414 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml @@ -0,0 +1,87 @@ +receivers: + otlp: + protocols: + grpc: +processors: + batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 + attributes: + # This processor is currently renaming two attributes + # that are prefixed with `log4j.context_data.` to the base attribute name + # to make queries within OpenSearch clearer. Both the `insert from_attribute` + # and the `delete` actions will fail silently if the attribute is not present, + # which means that these are safe for events that both do and don't have these + # attributes. This pattern should be extended to all of our standard attributes. + actions: + - key: event + from_attribute: log4j.context_data.event + action: insert + - key: log4j.context_data.event + action: delete + - key: channel_id + from_attribute: log4j.context_data.channel_id + action: insert + - key: log4j.context_data.channel_id + action: delete + # The following actions convert various should-be-int strings to ints + - key: log4j.context_data.source_http_status + action: convert + converted_type: int + - key: log4j.context_data.target_http_status + action: convert + converted_type: int + - key: log4j.context_data.http_status_match + action: convert + converted_type: int +extensions: + basicauth/client: + client_auth: + username: "admin" + password: "admin" + zpages: + endpoint: :55679 + pprof: + endpoint: :1888 + health_check: +exporters: + logging: + verbosity: detailed + sampling_initial: 5 + sampling_thereafter: 200 + opensearch: + namespace: migrations + http: + endpoint: "https://opensearchanalytics:9200" + auth: + authenticator: basicauth/client + tls: + insecure_skip_verify: true + awsemf: + namespace: 'TrafficCaptureReplay' + awsxray: + prometheus: + endpoint: "0.0.0.0:8889" + send_timestamps: true + metric_expiration: 5m + enable_open_metrics: true + otlp/jaeger: # Jaeger supports OTLP directly. 
The default port for OTLP/gRPC is 4317 + endpoint: jaeger:4317 + tls: + insecure: true +service: + extensions: [zpages, pprof, health_check, basicauth/client] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [logging, awsemf, prometheus] + traces: + receivers: [otlp] + processors: [batch] + exporters: [logging, awsxray, otlp/jaeger] + logs: + receivers: [otlp] + processors: [attributes] + exporters: [opensearch, logging] diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml new file mode 100644 index 000000000..84ba267de --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml @@ -0,0 +1,84 @@ +receivers: + otlp: + protocols: + grpc: +processors: + batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 + attributes: + # This processor is currently renaming two attributes + # that are prefixed with `log4j.context_data.` to the base attribute name + # to make queries within OpenSearch clearer. Both the `insert from_attribute` + # and the `delete` actions will fail silently if the attribute is not present, + # which means that these are safe for events that both do and don't have these + # attributes. This pattern should be extended to all of our standard attributes. + actions: + - key: event + from_attribute: log4j.context_data.event + action: insert + - key: log4j.context_data.event + action: delete + - key: channel_id + from_attribute: log4j.context_data.channel_id + action: insert + - key: log4j.context_data.channel_id + action: delete + # The following actions convert various should-be-int strings to ints + - key: log4j.context_data.source_http_status + action: convert + converted_type: int + - key: log4j.context_data.target_http_status + action: convert + converted_type: int + - key: log4j.context_data.http_status_match + action: convert + converted_type: int +extensions: + basicauth/client: + client_auth: + username: "admin" + password: "admin" + zpages: + endpoint: :55679 + pprof: + endpoint: :1888 + health_check: +exporters: + logging: + verbosity: detailed + sampling_initial: 5 + sampling_thereafter: 200 + opensearch: + namespace: migrations + http: + endpoint: "https://opensearchanalytics:9200" + auth: + authenticator: basicauth/client + tls: + insecure_skip_verify: true + prometheus: + endpoint: "0.0.0.0:8889" + send_timestamps: true + metric_expiration: 5m + enable_open_metrics: true + otlp/jaeger: # Jaeger supports OTLP directly. 
The default port for OTLP/gRPC is 4317 + endpoint: jaeger:4317 + tls: + insecure: true +service: + extensions: [zpages, pprof, health_check, basicauth/client] + pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [logging, prometheus] + traces: + receivers: [otlp] + processors: [batch] + exporters: [logging, otlp/jaeger] + logs: + receivers: [otlp] + processors: [attributes] + exporters: [opensearch, logging] diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml new file mode 100644 index 000000000..39aa7b81b --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml @@ -0,0 +1,20 @@ +version: '3.7' +services: + + otel-collector: + image: migrations/otel_collector:latest + command: ["--config=/etc/otel-config-aws.yaml", "${OTELCOL_ARGS}"] + networks: + - migrations + volumes: + - ~/.aws:/home/aoc/.aws + ports: + - "13133:13133" # health_check extension + - "4317:4317" # otlp receiver + depends_on: + - jaeger + environment: + - ANALYTICS_DOMAIN_ENDPOINT=opensearchanalytics # use the local container for compose here + - AWS_REGION=us-east-1 + - AWS_DEFAULT_REGION=us-east-1 + - AWS_PROFILE=default \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml new file mode 100644 index 000000000..2ee2b15f7 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml @@ -0,0 +1,23 @@ +version: '3.7' +services: + + otel-collector: + image: migrations/otel_collector:latest + command: ["--config=/etc/otel-config-everything.yaml", "${OTELCOL_ARGS}"] + networks: + - migrations + volumes: + - ./composeExtensions/configs/otel-config-everything.yaml:/etc/otel-config-everything.yaml + - ~/.aws:/home/aoc/.aws + ports: + - "8888:8888" # Prometheus metrics exposed by the collector + - "8889:8889" # Prometheus exporter metrics + - "13133:13133" # health_check extension + - "55679:55679" # zpages extension + - "4317:4317" # otlp receiver + depends_on: + - jaeger + environment: + - AWS_REGION=us-east-1 + - AWS_DEFAULT_REGION=us-east-1 + - AWS_PROFILE=default \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-prometheus-jaeger-opensearch.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-prometheus-jaeger-opensearch.yml new file mode 100644 index 000000000..719995c01 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-prometheus-jaeger-opensearch.yml @@ -0,0 +1,19 @@ +version: '3.7' +services: + + otel-collector: + image: migrations/otel_collector:latest + command: ["--config=/etc/otel-config-prometheus-jaeger-opensearch.yaml", "${OTELCOL_ARGS}"] + networks: + - migrations + volumes: + - ./composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml:/etc/otel-config-prometheus-jaeger-opensearch.yaml + ports: + - "1888:1888" # pprof extension + - "8888:8888" # Prometheus metrics exposed by the collector + - "8889:8889" # Prometheus exporter metrics + - "13133:13133" # health_check extension + - "55679:55679" # zpages extension + - "4317:4317" # otlp receiver + depends_on: + - jaeger \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose-multi.yml 
b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/proxy-multi.yml similarity index 100% rename from TrafficCapture/dockerSolution/src/main/docker/docker-compose-multi.yml rename to TrafficCapture/dockerSolution/src/main/docker/composeExtensions/proxy-multi.yml diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose-single.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/proxy-single.yml similarity index 100% rename from TrafficCapture/dockerSolution/src/main/docker/docker-compose-single.yml rename to TrafficCapture/dockerSolution/src/main/docker/composeExtensions/proxy-single.yml diff --git a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml index cdf76ecdc..bff5b3c42 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/docker-compose.yml @@ -34,31 +34,12 @@ services: ports: - "3000:3000" volumes: - - grafana_data:/var/lib/grafana + - ./grafana_data:/var/lib/grafana environment: - GF_SECURITY_ADMIN_PASSWORD=admin depends_on: - prometheus - # Collector - otel-collector: - image: otel/opentelemetry-collector:latest -# command: ["--config=/etc/otel-collector-config.yaml", "${OTELCOL_ARGS}"] - networks: - - migrations - volumes: -# - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml - - ./otel-collector-config.yaml:/etc/otelcol/config.yaml - ports: - - "1888:1888" # pprof extension - - "8888:8888" # Prometheus metrics exposed by the collector - - "8889:8889" # Prometheus exporter metrics - - "13133:13133" # health_check extension - - "55679:55679" # zpages extension - - "4317:4317" # otlp receiver - depends_on: - - jaeger - zookeeper: image: docker.io/bitnami/zookeeper:3.8 networks: @@ -142,7 +123,7 @@ services: # - "4317:4317" # - "13133:13133" # volumes: -# - ./otelcol/otel-config.yml:/etc/otel-config.yml +# - ./otelcol/otel-config-debug-only.yaml:/etc/otel-config-debug-only.yaml # networks: # - migrations # depends_on: diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile index 44fa2a848..69186c587 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile @@ -5,9 +5,8 @@ ENV DEBIAN_FRONTEND noninteractive RUN apt-get update && \ apt-get install -y --no-install-recommends python3.9 python3-pip python3-dev openjdk-11-jre-headless wget gcc libc-dev git curl vim jq unzip less && \ pip3 install urllib3==1.25.11 opensearch-benchmark==1.1.0 awscurl tqdm -# TODO upon the next release of opensearch-benchmark the awscli package should be installed by pip3, \ -# with the expected boto3 version upgrade resolving the current conflicts between opensearch-benchmark and awscli -RUN pip install awscli +# TODO upon the next release of opensearch-benchmark the awscli package should be installed by pip3, with the expected boto3 version upgrade resolving the current conflicts between opensearch-benchmark and awscli +RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && unzip awscliv2.zip && ./aws/install && rm -rf aws awscliv2.zip RUN mkdir /root/kafka-tools RUN mkdir /root/kafka-tools/aws COPY runTestBenchmarks.sh /root/ diff --git a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml 
b/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml deleted file mode 100644 index 3334c57d8..000000000 --- a/TrafficCapture/dockerSolution/src/main/docker/otel-collector-config.yaml +++ /dev/null @@ -1,43 +0,0 @@ -receivers: - otlp: - protocols: - grpc: - -processors: - batch: - timeout: 10s - send_batch_size: 8192 - send_batch_max_size: 10000 - -exporters: - prometheus: - endpoint: "0.0.0.0:8889" - send_timestamps: true - metric_expiration: 5m - enable_open_metrics: true - logging: - loglevel: debug - - otlp/jaeger: # Jaeger supports OTLP directly. The default port for OTLP/gRPC is 4317 - endpoint: jaeger:4317 - tls: - insecure: true - -extensions: - health_check: - pprof: - endpoint: :1888 - zpages: - endpoint: :55679 - -service: - extensions: [ pprof, zpages, health_check ] - pipelines: - traces: - receivers: [ otlp ] - processors: [ batch ] - exporters: [ otlp/jaeger ] - metrics: - receivers: [ otlp ] - processors: [ batch ] - exporters: [ logging,prometheus ] diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/Dockerfile new file mode 100644 index 000000000..e5083c6bf --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/Dockerfile @@ -0,0 +1,14 @@ +FROM public.ecr.aws/a0w2c5q7/otelcol-with-opensearch:amd-latest +#FROM public.ecr.aws/aws-observability/aws-otel-collector:v0.37.0 + +COPY otel-config*.yaml /etc/ +COPY exportCredsAndRun.sh / +RUN chmod ugo+x exportCredsAndRun.sh + +# Make this image consistent with the AWS Distro for OpenTelemetry. That +# is a leaner (and official) image that still has what we need, except for +# the OpenSearch logs exporter. +RUN useradd -m aoc +USER aoc +ENTRYPOINT ["./exportCredsAndRun.sh", "./otelcontribcol"] +CMD ["--config", "/etc/otel-config-debug-only.yaml"] diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelcol/README.md b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/README.md similarity index 100% rename from TrafficCapture/dockerSolution/src/main/docker/otelcol/README.md rename to TrafficCapture/dockerSolution/src/main/docker/otelCollector/README.md diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/exportCredsAndRun.sh b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/exportCredsAndRun.sh new file mode 100644 index 000000000..ce7ad9eaf --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/exportCredsAndRun.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +PROFILE_NAME=${AWS_PROFILE:-"default"} +echo "Using profile " $PROFILE_NAME + +CREDENTIALS_FILE="$HOME/.aws/credentials" + +# Check if the AWS credentials file exists +if [ -f "$CREDENTIALS_FILE" ]; then + if grep -q "^\[$PROFILE_NAME\]" "$CREDENTIALS_FILE"; then + export AWS_ACCESS_KEY_ID=$(awk -F "=" "/^\[$PROFILE_NAME\]/ {f=1} f==1 && /aws_access_key_id/ {print \$2; exit}" $CREDENTIALS_FILE) + export AWS_SECRET_ACCESS_KEY=$(awk -F "=" "/^\[$PROFILE_NAME\]/ {f=1} f==1 && /aws_secret_access_key/ {print \$2; exit}" $CREDENTIALS_FILE) + export AWS_SESSION_TOKEN=$(awk -F "=" "/^\[$PROFILE_NAME\]/ {f=1} f==1 && /aws_session_token/ {print \$2; exit}" $CREDENTIALS_FILE) + fi +fi + +"$@" diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml similarity index 74% rename from TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml rename to 
TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml index e84720698..30ff91294 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml @@ -2,9 +2,11 @@ receivers: otlp: protocols: grpc: - processors: batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 attributes: # This processor is currently renaming two attributes # that are prefixed with `log4j.context_data.` to the base attribute name @@ -33,36 +35,32 @@ processors: - key: log4j.context_data.http_status_match action: convert converted_type: int - extensions: - basicauth/client: - client_auth: - username: "admin" - password: "admin" health_check: - exporters: + logging: + verbosity: detailed + sampling_initial: 5 + sampling_thereafter: 200 opensearch: namespace: migrations http: - endpoint: "https://opensearchanalytics:9200" - auth: - authenticator: basicauth/client - tls: - insecure_skip_verify: true - logging: - verbosity: detailed - file: - path: /logs/filename.json - debug: - + endpoint: "${ANALYTICS_DOMAIN_ENDPOINT}" + awsemf: + namespace: 'TrafficCaptureReplay' + awsxray: service: - extensions: [health_check, basicauth/client] - telemetry: - logs: - level: "debug" + extensions: [health_check] pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [logging, awsemf] + traces: + receivers: [otlp] + processors: [batch] + exporters: [logging, awsxray] logs: receivers: [otlp] processors: [attributes] - exporters: [logging, debug, opensearch, file] \ No newline at end of file + exporters: [opensearch, logging] diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config-cdk.yml b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml similarity index 81% rename from TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config-cdk.yml rename to TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml index 093c6360f..3d1cf1a51 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otelcol/otel-config-cdk.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml @@ -2,9 +2,11 @@ receivers: otlp: protocols: grpc: - processors: batch: + timeout: 10s + send_batch_size: 8192 + send_batch_max_size: 10000 attributes: # This processor is currently renaming two attributes # that are prefixed with `log4j.context_data.` to the base attribute name @@ -33,26 +35,28 @@ processors: - key: log4j.context_data.http_status_match action: convert converted_type: int - extensions: health_check: - exporters: opensearch: namespace: migrations http: endpoint: "${ANALYTICS_DOMAIN_ENDPOINT}" - logging: - verbosity: detailed - debug: - + awsemf: + namespace: 'TrafficCaptureReplay' + awsxray: service: extensions: [health_check] - telemetry: - logs: - level: "debug" pipelines: + metrics: + receivers: [otlp] + processors: [batch] + exporters: [awsemf] + traces: + receivers: [otlp] + processors: [batch] + exporters: [awsxray] logs: receivers: [otlp] processors: [attributes] - exporters: [logging, debug, opensearch] \ No newline at end of file + exporters: [opensearch] diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml new file mode 100644 index 000000000..52b1a176e --- /dev/null +++ 
b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml @@ -0,0 +1,25 @@ +receivers: + otlp: + protocols: + grpc: +processors: +extensions: + zpages: + endpoint: :55679 + pprof: + endpoint: :1888 + health_check: +exporters: + logging: + verbosity: detailed + sampling_initial: 5 + sampling_thereafter: 200 +service: + extensions: [zpages, pprof, health_check] + pipelines: + traces: + exporters: [logging] + metrics: + exporters: [logging] + logs: + exporters: [logging] diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelcol/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/otelcol/Dockerfile deleted file mode 100644 index 687d8e910..000000000 --- a/TrafficCapture/dockerSolution/src/main/docker/otelcol/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM public.ecr.aws/a0w2c5q7/otelcol-with-opensearch:amd-latest - -COPY ./otel-config-cdk.yml /etc/otel-config.yml -ENTRYPOINT ["./otelcontribcol", "--config", "/etc/otel-config.yml"] \ No newline at end of file From a4173c052d62cb4afd68dc2d4150ddee1600cfbb Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 27 Jan 2024 17:51:20 -0500 Subject: [PATCH 75/94] PR feedback including: Use System.nanotime() instead of Instant.now() for duration calculations. Call the fillAttributes() method from the super (class or interface) before doing any more puts. Correct the order that attributes are pulled from the scope hierarchy (and add a test) Use the ActivityNames pattern for IWireCaptureContexts StreamLifecycleManager no longer implements Autoclosable. I didn't need it to close down contexts and no classes had implemented a non-empty version except to log that it was called. Signed-off-by: Greg Schohn --- .../kafkaoffloader/KafkaCaptureFactory.java | 5 -- .../tracing/KafkaRecordContext.java | 4 +- .../FileConnectionCaptureFactory.java | 2 - ...eamChannelConnectionCaptureSerializer.java | 1 - .../StreamLifecycleManager.java | 4 +- ...hannelConnectionCaptureSerializerTest.java | 3 - .../InMemoryConnectionCaptureFactory.java | 2 - .../tracing/BaseNestedSpanContext.java | 5 +- .../tracing/IInstrumentationAttributes.java | 8 +- .../tracing/IWithStartTimeAndAttributes.java | 5 +- .../migrations/tracing/RootOtelContext.java | 4 +- .../commoncontexts/IConnectionContext.java | 3 +- .../IHttpTransactionContext.java | 3 +- .../IInstrumentationAttributesTest.java | 75 +++++++++++++++++++ TrafficCapture/dockerSolution/README.md | 2 +- .../otelConfigs/consConfigSnippets.py | 8 +- .../netty/tracing/IWireCaptureContexts.java | 15 +++- .../netty/tracing/WireCaptureContexts.java | 6 -- .../proxyserver/CaptureProxy.java | 3 - .../replay/tracing/IReplayContexts.java | 8 +- .../replay/kafka/KafkaTracingTest.java | 10 --- 21 files changed, 116 insertions(+), 60 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java delete mode 100644 TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java index 833197d77..a207a01ba 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java +++ 
b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/KafkaCaptureFactory.java @@ -86,11 +86,6 @@ public StreamManager(IRootKafkaOffloaderContext rootScope, IConnectionContext ct this.startTime = Instant.now(); } - @Override - public void close() throws IOException { - log.atInfo().setMessage(() -> "factory.close()").log(); - } - @Override public CodedOutputStreamWrapper createStream() { telemetryContext.getCurrentSpan().addEvent("streamCreated"); diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index afd878881..7a6cdaa23 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -7,6 +7,7 @@ import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; @@ -55,7 +56,8 @@ private MetricInstruments(Meter meter, String activityName) { @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder.put(TOPIC_ATTR, getTopic()) + return super.fillAttributes(builder) + .put(TOPIC_ATTR, getTopic()) .put(RECORD_ID_ATTR, getRecordId()) .put(RECORD_SIZE_ATTR, getRecordSize()); } diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java index dc1cadeb0..38a0a3bf7 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/FileConnectionCaptureFactory.java @@ -52,8 +52,6 @@ public FileConnectionCaptureFactory(String nodeId, String path, int bufferSize) @AllArgsConstructor class StreamManager extends OrderedStreamLifecyleManager { String connectionId; - @Override - public void close() {} @Override public CodedOutputStreamAndByteBufferWrapper createStream() { diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java index fdd2344ff..75a4d90b8 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializer.java @@ -202,7 +202,6 @@ public CompletableFuture flushCommitAndResetStream(boolean isFinal) throws IO currentCodedOutputStreamHolderOrNull = null; if (isFinal) { streamHasBeenClosed = true; - streamManager.close(); } } } diff --git 
a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java index b7c97f3c2..74de0277a 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/StreamLifecycleManager.java @@ -3,10 +3,8 @@ import java.io.IOException; import java.util.concurrent.CompletableFuture; -public interface StreamLifecycleManager extends AutoCloseable { +public interface StreamLifecycleManager { CodedOutputStreamHolder createStream(); CompletableFuture closeStream(CodedOutputStreamHolder outputStreamHolder, int index); - - void close() throws IOException; } diff --git a/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java b/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java index 46ae835b2..0c5ce6539 100644 --- a/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java +++ b/TrafficCapture/captureOffloader/src/test/java/org/opensearch/migrations/trafficcapture/StreamChannelConnectionCaptureSerializerTest.java @@ -319,9 +319,6 @@ class StreamManager extends OrderedStreamLifecyleManager { int bufferSize; ConcurrentLinkedQueue outputBuffers; - @Override - public void close() {} - @Override public CodedOutputStreamHolder createStream() { return new CodedOutputStreamAndByteBufferWrapper(bufferSize); diff --git a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java index ce2710906..09b9b01c4 100644 --- a/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java +++ b/TrafficCapture/captureOffloader/src/testFixtures/java/org/opensearch/migrations/trafficcapture/InMemoryConnectionCaptureFactory.java @@ -34,8 +34,6 @@ public InMemoryConnectionCaptureFactory(String nodeId, int bufferSize, Runnable @AllArgsConstructor class StreamManager extends OrderedStreamLifecyleManager { - @Override - public void close() {} @Override public CodedOutputStreamHolder createStream() { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index e2300ab9d..9d665fcca 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -7,7 +7,6 @@ import lombok.Getter; import lombok.NonNull; -import java.time.Instant; import java.util.Optional; import java.util.stream.Stream; @@ -16,7 +15,7 @@ public abstract class BaseNestedSpanContext implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, IHasRootInstrumentationScope, AutoCloseable { final T enclosingScope; @Getter - final Instant startTime; + final long startNanoTime; @Getter private Span 
currentSpan; @Getter @@ -32,7 +31,7 @@ protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder a protected BaseNestedSpanContext(S rootScope, T enclosingScope) { rootScope.onContextCreated(this); this.enclosingScope = enclosingScope; - this.startTime = Instant.now(); + this.startNanoTime = System.nanoTime(); this.rootInstrumentationScope = rootScope; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 0f1397fd5..3b938bce0 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -45,15 +45,13 @@ default Attributes getPopulatedSpanAttributes() { default AttributesBuilder getPopulatedSpanAttributesBuilder() { var builder = Attributes.builder(); var currentObj = this; + // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts var stack = new ArrayDeque(); while (currentObj != null) { - stack.add(currentObj); + stack.addFirst(currentObj); currentObj = currentObj.getEnclosingScope(); } - // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts - return StreamSupport.stream( - Spliterators.spliteratorUnknownSize(stack.descendingIterator(), Spliterator.ORDERED), false) - .collect(Utils.foldLeft(builder, (b, iia)->iia.fillAttributes(b))); + return stack.stream().collect(Utils.foldLeft(builder, (b, iia)->iia.fillAttributes(b))); } default void meterIncrementEvent(LongCounter c) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java index 814b0199d..d844aebaf 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IWithStartTimeAndAttributes.java @@ -4,13 +4,12 @@ import io.opentelemetry.api.metrics.DoubleHistogram; import java.time.Duration; -import java.time.Instant; public interface IWithStartTimeAndAttributes extends IInstrumentationAttributes { - Instant getStartTime(); + long getStartNanoTime(); default Duration getSpanDuration() { - return Duration.between(getStartTime(), Instant.now()); + return Duration.ofNanos(System.nanoTime() - getStartNanoTime()); } default void meterHistogramMillis(DoubleHistogram histogram) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 92a2edd33..c206e2273 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -128,8 +128,8 @@ public MeterProvider getMeterProvider() { @Override public AttributesBuilder fillAttributes(AttributesBuilder builder) { - assert observedExceptionToIncludeInMetrics == null; - return builder; // nothing more to do + assert observedExceptionToIncludeInMetrics == null; // nothing more to do than this 
check + return IRootOtelContext.super.fillAttributes(builder); } private static SpanBuilder addLinkedToBuilder(Stream linkedSpanContexts, SpanBuilder spanBuilder) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 845b13f40..f2e23afb5 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -17,7 +17,8 @@ public interface IConnectionContext extends IScopedInstrumentationAttributes { @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder.put(CONNECTION_ID_ATTR, getConnectionId()) + return IScopedInstrumentationAttributes.super.fillAttributes(builder) + .put(CONNECTION_ID_ATTR, getConnectionId()) .put(NODE_ID_ATTR, getNodeId()); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index bebcee137..7b3309a78 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -11,6 +11,7 @@ public interface IHttpTransactionContext extends IScopedInstrumentationAttribute @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder.put(SOURCE_REQUEST_INDEX_KEY, getSourceRequestIndex()); + return IScopedInstrumentationAttributes.super.fillAttributes(builder) + .put(SOURCE_REQUEST_INDEX_KEY, getSourceRequestIndex()); } } diff --git a/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java new file mode 100644 index 000000000..f3da40f50 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java @@ -0,0 +1,75 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributesBuilder; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Optional; + +class IInstrumentationAttributesTest { + + private static final AttributeKey OVERRIDE_KEY = AttributeKey.stringKey("overrideKey"); + private static final AttributeKey UNIQUE_KEY = AttributeKey.stringKey("uniqueKey"); + + private static class AContext extends BaseNestedSpanContext{ + protected AContext(RootOtelContext rootScope, RootOtelContext enclosingScope) { + super(rootScope, enclosingScope); + } + + @Override + public String getActivityName() { + return "A"; + } + + @Override + public CommonScopedMetricInstruments getMetrics() { + return null; + } + + @Override + public AttributesBuilder fillAttributes(AttributesBuilder builder) { + return super.fillAttributes(builder) + .put(OVERRIDE_KEY, "a-toBeOverridden") + .put(UNIQUE_KEY, "a-toStay"); + } + } + + private static class BContext extends BaseNestedSpanContext{ + 
protected BContext(RootOtelContext rootScope, AContext enclosingScope) { + super(rootScope, enclosingScope); + } + + @Override + public String getActivityName() { + return "B"; + } + + @Override + public CommonScopedMetricInstruments getMetrics() { + return null; + } + + @Override + public AttributesBuilder fillAttributes(AttributesBuilder builder) { + return super.fillAttributes(builder) + .put(OVERRIDE_KEY, "b"); + } + } + + @Test + public void getPopulatedAttributesAreOverrideCorrectly() { + var rootCtx = new RootOtelContext("test"); + var aCtx = new AContext(rootCtx, rootCtx); + var bCtx = new BContext(rootCtx, aCtx); + + Optional.ofNullable(aCtx.getPopulatedSpanAttributes()).ifPresent(attrs-> { + Assertions.assertEquals("a-toBeOverridden", attrs.get(OVERRIDE_KEY)); + Assertions.assertEquals("a-toStay", attrs.get(UNIQUE_KEY)); + }); + Optional.ofNullable(bCtx.getPopulatedSpanAttributes()).ifPresent(attrs-> { + Assertions.assertEquals("b", attrs.get(OVERRIDE_KEY)); + Assertions.assertEquals("a-toStay", attrs.get(UNIQUE_KEY)); + }); + } +} \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/README.md b/TrafficCapture/dockerSolution/README.md index c0682cbe4..4d31eb2c4 100644 --- a/TrafficCapture/dockerSolution/README.md +++ b/TrafficCapture/dockerSolution/README.md @@ -31,7 +31,7 @@ containers AND the AWS services. ### Otel-Collector configurations -The migrations components use OpenTelemetry for instrumentation so that different systems can be utilized acrpss +The migrations components use OpenTelemetry for instrumentation so that different systems can be utilized across (Prometheus, CloudWatch, Zipkin, etc) and across different types of infrastructure. The docker solutions vended in this directory try to provide flexibility and consistency between different environments. Base images may change in the future as otel matures and configurations will also need to be updated over time due to external diff --git a/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py b/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py index c7ce0fd2f..4606f2420 100644 --- a/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py +++ b/TrafficCapture/dockerSolution/otelConfigs/consConfigSnippets.py @@ -2,6 +2,7 @@ import subprocess import yaml + def collect_snippet_dependencies(key, original_dict, depth, ground_truth_dict, already_collected_set, @@ -18,10 +19,11 @@ def collect_snippet_dependencies(key, original_dict, if parent not in already_collected_set: collect_snippet_dependencies(parent, original_dict, 0, original_dict, already_collected_set, found_at_depth_map) - found_at_depth_map[parent]=depth + found_at_depth_map[parent] = depth found_match = True return found_match + def construct_command(selected_keys, deps): dependency_depth_dict = dict() for key in selected_keys: @@ -32,9 +34,11 @@ def construct_command(selected_keys, deps): files = ' '.join([f"configSnippets/{dep}.yaml" for dep in ordered_snippets]) return f"yq eval-all '. as $item ireduce ({{}}; . 
*+ $item )' {files}" + def run_command(command): subprocess.run(command, shell=True, text=True) + def main(selected_keys): with open('dependencies.yml', 'r') as file: deps = yaml.safe_load(file) @@ -42,6 +46,8 @@ def main(selected_keys): command = construct_command(selected_keys, deps) run_command(command) + if __name__ == "__main__": args = sys.argv[1:] # Arguments from command line main(args) + diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java index f5294bfcc..26d9b4e08 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java @@ -6,6 +6,13 @@ public abstract class IWireCaptureContexts { + public static class ActivityNames { + public static final String BLOCKED = "blocked"; + public static final String GATHERING_REQUEST = "gatheringRequest"; + public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; + public static final String GATHERING_RESPONSE = "gatheringResponse"; + } + public static class MetricNames { public static final String UNREGISTERED = "unregistered"; public static final String REMOVED = "removed"; @@ -41,7 +48,7 @@ public interface IHttpMessageContext } public interface IRequestContext extends IHttpMessageContext { - String ACTIVITY_NAME = "gatheringRequest"; + String ACTIVITY_NAME = ActivityNames.GATHERING_REQUEST; default String getActivityName() { return ACTIVITY_NAME; @@ -57,7 +64,7 @@ default String getActivityName() { } public interface IBlockingContext extends IHttpMessageContext { - String ACTIVITY_NAME = "blocked"; + String ACTIVITY_NAME = ActivityNames.BLOCKED; default String getActivityName() { return ACTIVITY_NAME; @@ -65,7 +72,7 @@ default String getActivityName() { } public interface IWaitingForResponseContext extends IHttpMessageContext { - String ACTIVITY_NAME = "waitingForResponse"; + String ACTIVITY_NAME = ActivityNames.WAITING_FOR_RESPONSE; default String getActivityName() { return ACTIVITY_NAME; @@ -73,7 +80,7 @@ default String getActivityName() { } public interface IResponseContext extends IHttpMessageContext { - String ACTIVITY_NAME = "gatheringResponse"; + String ACTIVITY_NAME = ActivityNames.GATHERING_RESPONSE; default String getActivityName() { return ACTIVITY_NAME; diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java index 8785d5251..0e452ba79 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/WireCaptureContexts.java @@ -179,8 +179,6 @@ public void onBytesRead(int size) { public static class BlockingContext extends HttpMessageContext implements IWireCaptureContexts.IBlockingContext { - public static final String ACTIVITY_NAME = "blocked"; - public BlockingContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, long sourceRequestIndex) { @@ -211,8 +209,6 @@ public 
RequestContext.MetricInstruments getMetrics() { public static class WaitingForResponseContext extends HttpMessageContext implements IWireCaptureContexts.IWaitingForResponseContext { - public static final String ACTIVITY_NAME = "waitingForResponse"; - public WaitingForResponseContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, long sourceRequestIndex) { @@ -243,8 +239,6 @@ public RequestContext.MetricInstruments getMetrics() { public static class ResponseContext extends HttpMessageContext implements IWireCaptureContexts.IResponseContext { - public static final String ACTIVITY_NAME = "gatheringResponse"; - public ResponseContext(RootWireLoggingContext rootWireLoggingContext, IConnectionContext enclosingScope, long sourceRequestIndex) { diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index a1da3b95e..a705845ff 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -188,9 +188,6 @@ private static IConnectionCaptureFactory getNullConnectionCaptureFactory System.err.println("No trace log directory specified. Logging to /dev/null"); return ctx -> new StreamChannelConnectionCaptureSerializer<>(null, ctx.getConnectionId(), new StreamLifecycleManager<>() { - @Override - public void close() {} - @Override public CodedOutputStreamHolder createStream() { return () -> CodedOutputStream.newInstance(NullOutputStream.getInstance()); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index d639ce043..14ef5c670 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -5,6 +5,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; @@ -102,7 +103,8 @@ default String getActivityName() { @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IAccumulationScope.super.fillAttributes(builder.put(RECORD_ID_KEY, getRecordId())); + return IAccumulationScope.super.fillAttributes( + builder.put(RECORD_ID_KEY, getRecordId())); } ITrafficStreamsLifecycleContext createTrafficLifecyleContext(ITrafficStreamKey tsk); @@ -171,8 +173,8 @@ default long replayerRequestIndex() { @Override default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext.super.fillAttributes( - builder.put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex())); + return 
org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext.super.fillAttributes(builder) + .put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex()); } IRequestAccumulationContext createRequestAccumulationContext(); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java deleted file mode 100644 index 802ef98d0..000000000 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTracingTest.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.opensearch.migrations.replay.kafka; - -import org.junit.jupiter.api.Test; - -public class KafkaTracingTest { - @Test - public void testTracingWorks() { - - } -} From 2072f695427d37ec38b23704a94b4450064fb66a Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 27 Jan 2024 18:12:00 -0500 Subject: [PATCH 76/94] Change path from otelcol to otelCollector and enable the collector and OS analytics engine by default. Signed-off-by: Greg Schohn --- .../lib/service-stacks/migration-analytics-stack.ts | 2 +- test/awsE2ESolutionSetup.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts index 1d187b3e1..40d3690ea 100644 --- a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts @@ -75,7 +75,7 @@ export class MigrationAnalyticsStack extends MigrationServiceCore { ] this.createService({ serviceName: `otel-collector`, - dockerFilePath: join(__dirname, "../../../../../", "TrafficCapture/dockerSolution/src/main/docker/otelcol"), + dockerFilePath: join(__dirname, "../../../../../", "TrafficCapture/dockerSolution/src/main/docker/otelCollector"), securityGroups: securityGroups, taskCpuUnits: 1024, taskMemoryLimitMiB: 4096, diff --git a/test/awsE2ESolutionSetup.sh b/test/awsE2ESolutionSetup.sh index 9d08a6ded..b968f7601 100755 --- a/test/awsE2ESolutionSetup.sh +++ b/test/awsE2ESolutionSetup.sh @@ -118,7 +118,7 @@ read -r -d '' cdk_context << EOM "mskBrokerNodeCount": 2, "openAccessPolicyEnabled": true, "domainRemovalPolicy": "DESTROY", - "migrationAnalyticsServiceEnabled": false, + "migrationAnalyticsServiceEnabled": true, "fetchMigrationEnabled": true, "sourceClusterEndpoint": "", "dpPipelineTemplatePath": "../../../test/dp_pipeline_aws_integ.yaml" From be689b5bb6b2f1f363da879819e11f6e873b00f7 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 27 Jan 2024 19:04:19 -0500 Subject: [PATCH 77/94] Split the implementations of fillAttributes into two. 
One for attributes that should be present in all sub-spans and another where the attributes should only be present within the current one (and not children) Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 12 ++++++---- .../tracing/IInstrumentationAttributes.java | 23 +++++++++---------- .../migrations/tracing/RootOtelContext.java | 14 ++++------- .../commoncontexts/IConnectionContext.java | 4 ++-- .../IHttpTransactionContext.java | 4 ++-- .../IInstrumentationAttributesTest.java | 8 +++---- .../replay/tracing/IReplayContexts.java | 9 ++++---- 7 files changed, 36 insertions(+), 38 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 7a6cdaa23..1ef4309fb 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -7,7 +7,6 @@ import lombok.NonNull; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; @@ -55,10 +54,15 @@ private MetricInstruments(Meter meter, String activityName) { public String getActivityName() { return "stream_flush_called"; } @Override - public AttributesBuilder fillAttributes(AttributesBuilder builder) { - return super.fillAttributes(builder) + public AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return super.fillAttributesForSpansBelow(builder) .put(TOPIC_ATTR, getTopic()) - .put(RECORD_ID_ATTR, getRecordId()) + .put(RECORD_ID_ATTR, getRecordId()); + } + + @Override + public AttributesBuilder fillExtraAttributesForThisSpan(AttributesBuilder builder) { + return super.fillExtraAttributesForThisSpan(builder) .put(RECORD_SIZE_ATTR, getRecordSize()); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 3b938bce0..297340284 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -13,12 +13,6 @@ import java.time.Duration; import java.util.ArrayDeque; -import java.util.ArrayList; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; public interface IInstrumentationAttributes { AttributeKey HAD_EXCEPTION_KEY = AttributeKey.booleanKey("hadException"); @@ -27,10 +21,6 @@ public interface IInstrumentationAttributes { default Span getCurrentSpan() { return null; } - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return builder; - } - Throwable getObservedExceptionToIncludeInMetrics(); default @NonNull Attributes 
getPopulatedMetricAttributes(AttributesBuilder attributesBuilder) { @@ -43,7 +33,6 @@ default Attributes getPopulatedSpanAttributes() { } default AttributesBuilder getPopulatedSpanAttributesBuilder() { - var builder = Attributes.builder(); var currentObj = this; // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts var stack = new ArrayDeque(); @@ -51,7 +40,17 @@ default AttributesBuilder getPopulatedSpanAttributesBuilder() { stack.addFirst(currentObj); currentObj = currentObj.getEnclosingScope(); } - return stack.stream().collect(Utils.foldLeft(builder, (b, iia)->iia.fillAttributes(b))); + var builder = stack.stream() + .collect(Utils.foldLeft(Attributes.builder(), (b, iia)->iia.fillAttributesForSpansBelow(b))); + return fillExtraAttributesForThisSpan(builder); + } + + default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return builder; + } + + default AttributesBuilder fillExtraAttributesForThisSpan(AttributesBuilder builder) { + return builder; } default void meterIncrementEvent(LongCounter c) { diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index c206e2273..2019bb9c1 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -21,7 +21,6 @@ import io.opentelemetry.semconv.ResourceAttributes; import lombok.Getter; import lombok.NonNull; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.Utils; @@ -34,8 +33,6 @@ public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; private final String scopeName; - @Getter - Exception observedExceptionToIncludeInMetrics; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @NonNull String serviceName) { @@ -112,6 +109,11 @@ public RootOtelContext(String scopeName, OpenTelemetry sdk) { this.scopeName = scopeName; } + @Override + public Exception getObservedExceptionToIncludeInMetrics() { + return null; + } + @Override public RootOtelContext getEnclosingScope() { return null; @@ -126,12 +128,6 @@ public MeterProvider getMeterProvider() { return getOpenTelemetry().getMeterProvider(); } - @Override - public AttributesBuilder fillAttributes(AttributesBuilder builder) { - assert observedExceptionToIncludeInMetrics == null; // nothing more to do than this check - return IRootOtelContext.super.fillAttributes(builder); - } - private static SpanBuilder addLinkedToBuilder(Stream linkedSpanContexts, SpanBuilder spanBuilder) { return Optional.ofNullable(linkedSpanContexts) .map(ss -> ss.collect(Utils.foldLeft(spanBuilder, (b, s) -> b.addLink(s.getSpanContext())))) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index f2e23afb5..6072eb346 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -16,8 +16,8 @@ public interface 
IConnectionContext extends IScopedInstrumentationAttributes { default IInstrumentationAttributes getEnclosingScope() { return null; } @Override - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IScopedInstrumentationAttributes.super.fillAttributes(builder) + default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return IScopedInstrumentationAttributes.super.fillAttributesForSpansBelow(builder) .put(CONNECTION_ID_ATTR, getConnectionId()) .put(NODE_ID_ATTR, getNodeId()); } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java index 7b3309a78..13acb7136 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IHttpTransactionContext.java @@ -10,8 +10,8 @@ public interface IHttpTransactionContext extends IScopedInstrumentationAttribute long getSourceRequestIndex(); @Override - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IScopedInstrumentationAttributes.super.fillAttributes(builder) + default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return IScopedInstrumentationAttributes.super.fillAttributesForSpansBelow(builder) .put(SOURCE_REQUEST_INDEX_KEY, getSourceRequestIndex()); } } diff --git a/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java index f3da40f50..d5fdd268e 100644 --- a/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java +++ b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java @@ -28,8 +28,8 @@ public CommonScopedMetricInstruments getMetrics() { } @Override - public AttributesBuilder fillAttributes(AttributesBuilder builder) { - return super.fillAttributes(builder) + public AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return super.fillAttributesForSpansBelow(builder) .put(OVERRIDE_KEY, "a-toBeOverridden") .put(UNIQUE_KEY, "a-toStay"); } @@ -51,8 +51,8 @@ public CommonScopedMetricInstruments getMetrics() { } @Override - public AttributesBuilder fillAttributes(AttributesBuilder builder) { - return super.fillAttributes(builder) + public AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return super.fillAttributesForSpansBelow(builder) .put(OVERRIDE_KEY, "b"); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 14ef5c670..d16ab1902 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -5,7 +5,6 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import org.opensearch.migrations.tracing.IWithTypedEnclosingScope; @@ -102,8 +101,8 @@ default String getActivityName() { String getRecordId(); @Override - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return IAccumulationScope.super.fillAttributes( + default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return IAccumulationScope.super.fillAttributesForSpansBelow( builder.put(RECORD_ID_KEY, getRecordId())); } @@ -172,8 +171,8 @@ default long replayerRequestIndex() { } @Override - default AttributesBuilder fillAttributes(AttributesBuilder builder) { - return org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext.super.fillAttributes(builder) + default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return org.opensearch.migrations.tracing.commoncontexts.IHttpTransactionContext.super.fillAttributesForSpansBelow(builder) .put(REPLAYER_REQUEST_INDEX_KEY, replayerRequestIndex()); } From 607ff054ec5c7121009dbdc5d9d16146549c8b84 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 27 Jan 2024 21:45:34 -0500 Subject: [PATCH 78/94] Fix the dependencies for logging leaves and add 'processors' and 'receivers' for metrics, traces and logs templates. Signed-off-by: Greg Schohn --- .../otelConfigs/configSnippets/logs.yaml | 4 +++- .../otelConfigs/configSnippets/metrics.yaml | 4 +++- .../otelConfigs/configSnippets/traces.yaml | 4 +++- .../otelConfigs/dependencies.yml | 18 ++++++++++++------ .../configs/otel-config-everything.yaml | 2 +- ...el-config-prometheus-jaeger-opensearch.yaml | 2 +- .../otel-image-default-config.yml | 14 ++++++++++++++ .../otelCollector/otel-config-debug-only.yaml | 6 ++++++ 8 files changed, 43 insertions(+), 11 deletions(-) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-image-default-config.yml diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml index 5dfea614a..cb93ed1e6 100644 --- a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/logs.yaml @@ -1,4 +1,6 @@ service: pipelines: logs: - receivers: [ otlp ] \ No newline at end of file + receivers: [ otlp ] + processors: + exporters: \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml index cc7b29457..6f9ea4b5d 100644 --- a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/metrics.yaml @@ -1,4 +1,6 @@ service: pipelines: metrics: - receivers: [ otlp ] \ No newline at end of file + receivers: [ otlp ] + processors: + exporters: \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml index 8c7e89e60..6fcd87eac 100644 --- a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/traces.yaml @@ -1,4 +1,6 @@ service: pipelines: traces: - receivers: [ otlp ] \ No newline at end of file + receivers: [ 
otlp ] + processors: + exporters: \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml b/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml index 4cd9c4873..a98a4cf9a 100644 --- a/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml +++ b/TrafficCapture/dockerSolution/otelConfigs/dependencies.yml @@ -3,10 +3,20 @@ base: batchMetrics: prometheus: awsCloudWatch: + debugMetricsDetailed: + infoMetricsDetailed: traces: batchTraces: awsXRay: jaeger: + debugTracesDetailed: + infoTracessDetailed: + logs: + openSearch: + openSearchAws: + openSearchLocal: + debugLogsDetailed: + infoLogsDetailed: basicAuthClient: openSearchLocal: batch: @@ -14,16 +24,12 @@ base: batchTraces: debugDetailed: debugMetricsDetailed: - debugTracesDetailed: debugLogsDetailed: + debugTracesDetailed: debugInfo: debugMetricsInfo: - debugTracesInfo: debugLogsInfo: + debugTracesInfo: healthCheck: - logs: - openSearch: - openSearchAws: - openSearchLocal: pprof: zpages: \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml index 9e896d414..f0ee7d695 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml @@ -51,13 +51,13 @@ exporters: sampling_initial: 5 sampling_thereafter: 200 opensearch: - namespace: migrations http: endpoint: "https://opensearchanalytics:9200" auth: authenticator: basicauth/client tls: insecure_skip_verify: true + namespace: migrations awsemf: namespace: 'TrafficCaptureReplay' awsxray: diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml index 84ba267de..7e418eba8 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml @@ -51,13 +51,13 @@ exporters: sampling_initial: 5 sampling_thereafter: 200 opensearch: - namespace: migrations http: endpoint: "https://opensearchanalytics:9200" auth: authenticator: basicauth/client tls: insecure_skip_verify: true + namespace: migrations prometheus: endpoint: "0.0.0.0:8889" send_timestamps: true diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-image-default-config.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-image-default-config.yml new file mode 100644 index 000000000..58706136c --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-image-default-config.yml @@ -0,0 +1,14 @@ +version: '3.7' +services: + + otel-collector: + image: migrations/otel_collector:latest + networks: + - migrations + volumes: + - ./composeExtensions/configs/otel-config-prometheus-jaeger-opensearch.yaml:/etc/otel-config-prometheus-jaeger-opensearch.yaml + ports: + - "1888:1888" # pprof extension + - "13133:13133" # health_check extension + - "55679:55679" # zpages extension + - "4317:4317" # otlp receiver diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml 
b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml index 52b1a176e..baea39abc 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-debug-only.yaml @@ -18,8 +18,14 @@ service: extensions: [zpages, pprof, health_check] pipelines: traces: + receivers: [otlp] + processors: exporters: [logging] metrics: + receivers: [otlp] + processors: exporters: [logging] logs: + receivers: [otlp] + processors: exporters: [logging] From d480ed893d28f237eed3957c74e58398a866b5bb Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 27 Jan 2024 21:56:29 -0500 Subject: [PATCH 79/94] Setting the docker command for the otel-collector service to use the aws config file Signed-off-by: Greg Schohn --- .../lib/service-stacks/migration-analytics-stack.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts index 40d3690ea..0b29acafc 100644 --- a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts @@ -76,6 +76,7 @@ export class MigrationAnalyticsStack extends MigrationServiceCore { this.createService({ serviceName: `otel-collector`, dockerFilePath: join(__dirname, "../../../../../", "TrafficCapture/dockerSolution/src/main/docker/otelCollector"), + dockerImageCommand: ["--config=/etc/otel-config-aws.yaml"], securityGroups: securityGroups, taskCpuUnits: 1024, taskMemoryLimitMiB: 4096, From 59db42f39114dab1c5851718565aeaff7c57087f Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 27 Jan 2024 23:00:36 -0500 Subject: [PATCH 80/94] Set the permissions for the otel container to write to cloudwatch and xray Signed-off-by: Greg Schohn --- .../lib/common-utilities.ts | 22 +++++++++++++++++++ .../migration-analytics-stack.ts | 7 +++++- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/deployment/cdk/opensearch-service-migration/lib/common-utilities.ts b/deployment/cdk/opensearch-service-migration/lib/common-utilities.ts index 80ad70a64..a28fa2b61 100644 --- a/deployment/cdk/opensearch-service-migration/lib/common-utilities.ts +++ b/deployment/cdk/opensearch-service-migration/lib/common-utilities.ts @@ -77,6 +77,28 @@ export function createMSKProducerIAMPolicies(scope: Construct, region: string, a return [mskClusterConnectPolicy, mskTopicProducerPolicy] } +export function createAwsDistroForOtelPushInstrumentationPolicy(): PolicyStatement { + // see https://aws-otel.github.io/docs/setup/permissions + return new PolicyStatement( { + effect: Effect.ALLOW, + resources: ["*"], + actions: [ + "logs:PutLogEvents", + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:DescribeLogStreams", + "logs:DescribeLogGroups", + "logs:PutRetentionPolicy", + "xray:PutTraceSegments", + "xray:PutTelemetryRecords", + "xray:GetSamplingRules", + "xray:GetSamplingTargets", + "xray:GetSamplingStatisticSummaries", + "ssm:GetParameters" + ] + }) +} + export function createDefaultECSTaskRole(scope: Construct, serviceName: string): Role { const serviceTaskRole = new Role(scope, `${serviceName}-TaskRole`, { assumedBy: new ServicePrincipal('ecs-tasks.amazonaws.com'), diff --git 
a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts index 0b29acafc..ca13341c9 100644 --- a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-analytics-stack.ts @@ -11,6 +11,7 @@ import {Construct} from "constructs"; import {join} from "path"; import {MigrationServiceCore} from "./migration-service-core"; import {StringParameter} from "aws-cdk-lib/aws-ssm"; +import {createAwsDistroForOtelPushInstrumentationPolicy} from "../common-utilities"; export interface MigrationAnalyticsProps extends StackPropsExt { readonly vpc: IVpc, @@ -72,7 +73,10 @@ export class MigrationAnalyticsStack extends MigrationServiceCore { let securityGroups = [ SecurityGroup.fromSecurityGroupId(this, "serviceConnectSG", StringParameter.valueForStringParameter(this, `/migration/${props.stage}/${props.defaultDeployId}/serviceConnectSecurityGroupId`)), migrationAnalyticsSecurityGroup - ] + ] + + const servicePolicies = [createAwsDistroForOtelPushInstrumentationPolicy()] + this.createService({ serviceName: `otel-collector`, dockerFilePath: join(__dirname, "../../../../../", "TrafficCapture/dockerSolution/src/main/docker/otelCollector"), @@ -82,6 +86,7 @@ export class MigrationAnalyticsStack extends MigrationServiceCore { taskMemoryLimitMiB: 4096, portMappings: [otelCollectorPort, otelHealthCheckPort], serviceConnectServices: [serviceConnectServiceCollector, serviceConnectServiceHealthCheck], + taskRolePolicies: servicePolicies, environment: { "ANALYTICS_DOMAIN_ENDPOINT": analyticsDomainEndpoint }, From 76c8c31484b41e7820942a14a90154f45bd795f2 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 31 Jan 2024 12:29:42 -0500 Subject: [PATCH 81/94] Minor cleanup Signed-off-by: Greg Schohn --- .../otel-config-everything-compose.yaml | 90 ------------------- .../docker/composeExtensions/otel-aws.yml | 2 - .../netty/ProxyChannelInitializer.java | 2 +- .../NettyPacketToHttpConsumer.java | 4 +- .../http/RequestPipelineOrchestrator.java | 14 +-- ...gHandler.java => ReadMeteringHandler.java} | 2 +- 6 files changed, 11 insertions(+), 103 deletions(-) delete mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml rename TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/{ReadMeteringingHandler.java => ReadMeteringHandler.java} (91%) diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml deleted file mode 100644 index 654c432c1..000000000 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything-compose.yaml +++ /dev/null @@ -1,90 +0,0 @@ -receivers: - otlp: - protocols: - grpc: - -processors: - batch: - timeout: 10s - send_batch_size: 8192 - send_batch_max_size: 10000 - attributes: - # This processor is currently renaming two attributes - # that are prefixed with `log4j.context_data.` to the base attribute name - # to make queries within OpenSearch clearer. 
Both the `insert from_attribute` - # and the `delete` actions will fail silently if the attribute is not present, - # which means that these are safe for events that both do and don't have these - # attributes. This pattern should be extended to all of our standard attributes. - actions: - - key: event - from_attribute: log4j.context_data.event - action: insert - - key: log4j.context_data.event - action: delete - - key: channel_id - from_attribute: log4j.context_data.channel_id - action: insert - - key: log4j.context_data.channel_id - action: delete - # The following actions convert various should-be-int strings to ints - - key: log4j.context_data.source_http_status - action: convert - converted_type: int - - key: log4j.context_data.target_http_status - action: convert - converted_type: int - - key: log4j.context_data.http_status_match - action: convert - converted_type: int - -extensions: - basicauth/client: - client_auth: - username: "admin" - password: "admin" - health_check: - zpages: - endpoint: :55679 - -exporters: - logging: - verbosity: debug - - awsemf: - namespace: 'TrafficCaptureReplay' - prometheus: - endpoint: "0.0.0.0:8889" - send_timestamps: true - metric_expiration: 5m - enable_open_metrics: true - - awsxray: - otlp/jaeger: # Jaeger supports OTLP directly. The default port for OTLP/gRPC is 4317 - endpoint: jaeger:4317 - tls: - insecure: true - - opensearch: - namespace: migrations - http: - endpoint: "https://opensearchanalytics:9200" - auth: - authenticator: basicauth/client - tls: - insecure_skip_verify: true - -service: - extensions: [ zpages, health_check, basicauth/client ] - pipelines: - traces: - receivers: [ otlp ] - processors: [ batch ] - exporters: [ otlp/jaeger, awsxray ] - metrics: - receivers: [ otlp ] - processors: [ batch ] - exporters: [ logging, prometheus, awsemf ] - logs: - receivers: [otlp] - processors: [attributes] - exporters: [logging, debug, opensearch] \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml index 39aa7b81b..cbe7b5764 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml @@ -11,8 +11,6 @@ services: ports: - "13133:13133" # health_check extension - "4317:4317" # otlp receiver - depends_on: - - jaeger environment: - ANALYTICS_DOMAIN_ENDPOINT=opensearchanalytics # use the local container for compose here - AWS_REGION=us-east-1 diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index c4df0e019..bb9f0ed29 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -50,7 +50,7 @@ protected void initChannel(SocketChannel ch) throws IOException { } var connectionId = ch.id().asLongText(); - ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler(rootContext, + ch.pipeline().addLast(new ConditionallyReliableLoggingHttpHandler<>(rootContext, "", connectionId, connectionCaptureFactory, 
requestCapturePredicate, this::shouldGuaranteeMessageOffloading)); ch.pipeline().addLast(new FrontsideHandler(backsideConnectionPool)); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java index 6c953d4e1..922a5a94d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumer.java @@ -23,7 +23,7 @@ import org.opensearch.migrations.coreutils.MetricsEvent; import org.opensearch.migrations.coreutils.MetricsLogger; import org.opensearch.migrations.replay.AggregatedRawResponse; -import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; +import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringHandler; import org.opensearch.migrations.replay.datahandlers.http.helpers.WriteMeteringHandler; import org.opensearch.migrations.replay.netty.BacksideHttpWatcherHandler; import org.opensearch.migrations.replay.netty.BacksideSnifferHandler; @@ -186,7 +186,7 @@ private void activateChannelForThisConsumer() { } getParentContext().onBytesSent(size); })); - pipeline.addFirst(READ_COUNT_WATCHER_HANDLER_NAME, new ReadMeteringingHandler(size->{ + pipeline.addFirst(READ_COUNT_WATCHER_HANDLER_NAME, new ReadMeteringHandler(size->{ // client side, so this is the response if (size == 0) { return; } if (!(this.currentRequestContextUnion instanceof IReplayContexts.IReceivingHttpResponseContext)) { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java index 9840d4d80..3655b57f4 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/RequestPipelineOrchestrator.java @@ -11,7 +11,7 @@ import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datahandlers.IPacketFinalizingConsumer; import org.opensearch.migrations.replay.datahandlers.http.helpers.LastHttpContentListener; -import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringingHandler; +import org.opensearch.migrations.replay.datahandlers.http.helpers.ReadMeteringHandler; import org.opensearch.migrations.replay.tracing.IReplayContexts; import org.opensearch.migrations.transform.IAuthTransformer; import org.opensearch.migrations.transform.IAuthTransformerFactory; @@ -87,7 +87,7 @@ void addJsonParsingHandlers(ChannelHandlerContext ctx, void addInitialHandlers(ChannelPipeline pipeline, IJsonTransformer transformer) { pipeline.addFirst(HTTP_REQUEST_DECODER_NAME, new HttpRequestDecoder()); addLoggingHandler(pipeline, "A"); - pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::aggregateInputChunk)); + pipeline.addLast(new ReadMeteringHandler(httpTransactionContext::aggregateInputChunk)); // IN: Netty HttpRequest(1) + HttpContent(1) blocks (which may be compressed) + EndOfInput + ByteBuf // OUT: ByteBufs(1) OR Netty HttpRequest(1) + HttpJsonMessage(1) with only headers PLUS + 
HttpContent(1) blocks // Note1: original Netty headers are preserved so that HttpContentDecompressor can work appropriately. @@ -110,11 +110,11 @@ void addContentParsingHandlers(ChannelHandlerContext ctx, httpTransactionContext.onPayloadParse(); log.debug("Adding content parsing handlers to pipeline"); var pipeline = ctx.pipeline(); - pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onPayloadBytesIn)); + pipeline.addLast(new ReadMeteringHandler(httpTransactionContext::onPayloadBytesIn)); // IN: Netty HttpRequest(1) + HttpJsonMessage(1) with headers + HttpContent(1) blocks (which may be compressed) // OUT: Netty HttpRequest(2) + HttpJsonMessage(1) with headers + HttpContent(2) uncompressed blocks pipeline.addLast(new HttpContentDecompressor()); - pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onUncompressedBytesIn)); + pipeline.addLast(new ReadMeteringHandler(httpTransactionContext::onUncompressedBytesIn)); if (transformer != null) { httpTransactionContext.onJsonPayloadParseRequired(); log.debug("Adding JSON handlers to pipeline"); @@ -135,11 +135,11 @@ void addContentParsingHandlers(ChannelHandlerContext ctx, addLoggingHandler(pipeline, "G"); } pipeline.addLast(new LastHttpContentListener(httpTransactionContext::onPayloadParseSuccess)); - pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onUncompressedBytesOut)); + pipeline.addLast(new ReadMeteringHandler(httpTransactionContext::onUncompressedBytesOut)); // IN: Netty HttpRequest(2) + HttpJsonMessage(3) with headers only + HttpContent(3) blocks // OUT: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + HttpContent(4) blocks pipeline.addLast(new NettyJsonContentCompressor()); - pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::onFinalBytesOut)); + pipeline.addLast(new ReadMeteringHandler(httpTransactionContext::onFinalBytesOut)); addLoggingHandler(pipeline, "H"); // IN: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + HttpContent(4) blocks + EndOfInput // OUT: Netty HttpRequest(3) + HttpJsonMessage(4) with headers only + ByteBufs(2) @@ -153,7 +153,7 @@ void addBaselineHandlers(ChannelPipeline pipeline) { // IN: ByteBufs(2) + HttpJsonMessage(4) with headers only + HttpContent(1) (if the repackaging handlers were skipped) // OUT: ByteBufs(3) which are sized similarly to how they were received pipeline.addLast(new NettyJsonToByteBufHandler(Collections.unmodifiableList(chunkSizes))); - pipeline.addLast(new ReadMeteringingHandler(httpTransactionContext::aggregateOutputChunk)); + pipeline.addLast(new ReadMeteringHandler(httpTransactionContext::aggregateOutputChunk)); // IN: ByteBufs(3) // OUT: nothing - terminal! ByteBufs are routed to the packet handler! 
addLoggingHandler(pipeline, "K"); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringHandler.java similarity index 91% rename from TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java rename to TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringHandler.java index 87d38cd1e..ea81036fe 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringingHandler.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datahandlers/http/helpers/ReadMeteringHandler.java @@ -9,7 +9,7 @@ import java.util.function.IntConsumer; @AllArgsConstructor -public class ReadMeteringingHandler extends ChannelInboundHandlerAdapter { +public class ReadMeteringHandler extends ChannelInboundHandlerAdapter { private final IntConsumer sizeConsumer; @Override From 2cc67fd1fc037fbe9505ef0c4c7ab7a6e1e31663 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 31 Jan 2024 22:49:57 -0500 Subject: [PATCH 82/94] Fix the runTestBenchmarks script to work when the endpoint uses http instead of https. I've also removed the 'no-ssl' option and deduce it from the protocol Signed-off-by: Greg Schohn --- .../migrationConsole/runTestBenchmarks.sh | 34 +++++++++---------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/runTestBenchmarks.sh b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/runTestBenchmarks.sh index acec04354..dcb4bc919 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/runTestBenchmarks.sh +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/runTestBenchmarks.sh @@ -5,7 +5,6 @@ endpoint="https://capture-proxy-es:9200" auth_user="admin" auth_pass="admin" no_auth=false -no_ssl=false # Override default values with optional command-line arguments while [[ $# -gt 0 ]]; do @@ -16,12 +15,12 @@ while [[ $# -gt 0 ]]; do shift shift ;; - --auth_user) + --auth-user) auth_user="$2" shift shift ;; - --auth_pass) + --auth-pass) auth_pass="$2" shift shift @@ -30,31 +29,30 @@ while [[ $# -gt 0 ]]; do no_auth=true shift ;; - --no-ssl) - no_ssl=true - shift - ;; *) shift ;; esac done -# Populate auth string -if [ "$no_auth" = true ]; then - auth_string="" -else - auth_string=",basic_auth_user:${auth_user},basic_auth_password:${auth_pass}" + +# Initialize an empty array to hold non-empty values +options=() + +if [[ "$endpoint" == https:* ]]; then + options+=("use_ssl:true,verify_certs:false") fi -if [ "$no_ssl" = true ]; then - base_options_string="" -else - base_options_string="use_ssl:true,verify_certs:false" + +# Populate auth string +if [ "$no_auth" = false ]; then + options+=("basic_auth_user:${auth_user},basic_auth_password:${auth_pass}") fi -# Construct the final client options string -client_options="${base_options_string}${auth_string}" +# Join the non-empty values using a comma +client_options=$(IFS=,; echo "${options[*]}") + +set -o xtrace echo "Running opensearch-benchmark workloads against ${endpoint}" echo "Running opensearch-benchmark w/ 'geonames' workload..." 
&& From 9425ab68c5e58369c1bc532cf6a7050b89962aec Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sat, 3 Feb 2024 14:22:39 -0500 Subject: [PATCH 83/94] Aesthetic formatting changes Signed-off-by: Greg Schohn --- .../docker/composeExtensions/otel-aws.yml | 4 +-- .../replay/tracing/ReplayContexts.java | 27 ++++++++++++------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml index cbe7b5764..18366e059 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml @@ -6,11 +6,11 @@ services: command: ["--config=/etc/otel-config-aws.yaml", "${OTELCOL_ARGS}"] networks: - migrations - volumes: - - ~/.aws:/home/aoc/.aws ports: - "13133:13133" # health_check extension - "4317:4317" # otlp receiver + volumes: + - ~/.aws:/home/aoc/.aws environment: - ANALYTICS_DOMAIN_ENDPOINT=opensearchanalytics # use the local container for compose here - AWS_REGION=us-east-1 diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index e2769b6af..f7a376d8a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -265,7 +265,8 @@ public IReplayContexts.IChannelKeyContext getLogicalEnclosingScope() { } public static class RequestAccumulationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestAccumulationContext { public RequestAccumulationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -288,7 +289,8 @@ private MetricInstruments(Meter meter, String activityName) { } public static class ResponseAccumulationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IResponseAccumulationContext { public ResponseAccumulationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -311,7 +313,8 @@ private MetricInstruments(Meter meter, String activityName) { } public static class RequestTransformationContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestTransformationContext { public RequestTransformationContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -456,7 +459,8 @@ public void aggregateOutputChunk(int sizeInBytes) { } public static class ScheduledContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IScheduledContext { private final Instant scheduledFor; @@ -491,7 +495,8 @@ public void sendMeterEventsForEnd() { } public static class TargetRequestContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.ITargetRequestContext { public TargetRequestContext(HttpTransactionContext enclosingScope) { super(enclosingScope); @@ -552,7 +557,8 @@ public IReplayContexts.IWaitingForHttpResponseContext createWaitingForResponseCo } public static class RequestSendingContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IRequestSendingContext { public 
RequestSendingContext(TargetRequestContext enclosingScope) { super(enclosingScope); @@ -575,7 +581,8 @@ private MetricInstruments(Meter meter, String activityName) { } public static class WaitingForHttpResponseContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IWaitingForHttpResponseContext { public WaitingForHttpResponseContext(TargetRequestContext enclosingScope) { super(enclosingScope); @@ -599,7 +606,8 @@ private MetricInstruments(Meter meter, String activityName) { } public static class ReceivingHttpResponseContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.IReceivingHttpResponseContext { public ReceivingHttpResponseContext(TargetRequestContext enclosingScope) { super(enclosingScope); @@ -625,7 +633,8 @@ private MetricInstruments(Meter meter, String activityName) { @Getter @Setter public static class TupleHandlingContext - extends DirectNestedSpanContext + extends DirectNestedSpanContext implements IReplayContexts.ITupleHandlingContext { Integer sourceStatus; Integer targetStatus; From deb19a462c7b1faf1a4a81d0c217eab81579b124 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Sun, 4 Feb 2024 17:08:57 -0500 Subject: [PATCH 84/94] IInstrumentationAttributes no longer has scope related functionality. That's been pushed down to its subclass IScopedInstrumentationAttributes. Attributes for spans are now filled when the span is closed rather than when it is created. This gives less leeway to being able to override the value w/out changing the value within the context class, but the context class should be the ground truth, record of value, so this seems like it's the right behavior anyway. With those changes, I did some cleanup on the attribute values that were being tracked for tuple comparison. Now status codes are tracked as metric AND span attributes. That should make it MUCH easier to search metrics for specific patterns that popped up in metrics. 
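As a rough illustration of the close-time attribute pattern described above (an illustrative sketch, not part of this patch; it uses only the plain OpenTelemetry API, and the class name and the "sourceStatusCode" key are hypothetical), the idea is that the span starts bare, the owning context object holds the ground-truth values while the scope is active, and everything is copied onto the span immediately before end():

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.common.AttributeKey;
    import io.opentelemetry.api.common.Attributes;

    class CloseTimeAttributeSketch {
        private Integer sourceStatus; // ground truth lives on the context, not the span

        void demo() {
            var tracer = GlobalOpenTelemetry.getTracer("sketch");
            var span = tracer.spanBuilder("tupleHandling").startSpan(); // created with no attributes
            sourceStatus = 200;                                         // recorded while the scope is active
            span.setAllAttributes(Attributes.builder()                  // attributes filled only at close
                    .put(AttributeKey.longKey("sourceStatusCode"), (long) sourceStatus)
                    .build());
            span.end();
        }
    }
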
Signed-off-by: Greg Schohn --- .../tracing/ConnectionContext.java | 6 +- .../tracing/BaseNestedSpanContext.java | 50 +------------ .../migrations/tracing/BaseSpanContext.java | 73 ++++++++++++++++++ .../tracing/CommonMetricInstruments.java | 13 ++++ .../CommonScopedMetricInstruments.java | 25 ++++--- .../tracing/DirectNestedSpanContext.java | 2 +- .../tracing/IInstrumentConstructor.java | 3 +- .../tracing/IInstrumentationAttributes.java | 45 ++--------- .../IScopedInstrumentationAttributes.java | 74 +++++++++++++++++-- .../migrations/tracing/RootOtelContext.java | 25 ++++--- .../commoncontexts/IConnectionContext.java | 2 +- .../IInstrumentationAttributesTest.java | 13 +++- .../proxyserver/RootCaptureContext.java | 19 +++-- .../replay/ParsedHttpMessagesAsDicts.java | 4 +- .../replay/tracing/IReplayContexts.java | 8 +- .../replay/tracing/KafkaConsumerContexts.java | 15 ++-- .../replay/tracing/ReplayContexts.java | 59 +++++++++------ .../replay/tracing/RootReplayerContext.java | 6 +- .../replay/tracing/TrafficSourceContexts.java | 5 +- .../FullReplayerWithTracingChecksTest.java | 2 +- 20 files changed, 284 insertions(+), 165 deletions(-) create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java create mode 100644 TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonMetricInstruments.java diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 15db7ea94..4fdfb5776 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -4,12 +4,12 @@ import io.opentelemetry.api.metrics.Meter; import lombok.Getter; import lombok.NonNull; -import org.opensearch.migrations.tracing.BaseNestedSpanContext; +import org.opensearch.migrations.tracing.BaseSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.IHasRootInstrumentationScope; import org.opensearch.migrations.tracing.commoncontexts.IConnectionContext; -public class ConnectionContext extends BaseNestedSpanContext +public class ConnectionContext extends BaseSpanContext implements IConnectionContext, IHasRootInstrumentationScope { public static final String ACTIVE_CONNECTION = "activeConnection"; @@ -24,7 +24,7 @@ public class ConnectionContext extends BaseNestedSpanContext - implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, IHasRootInstrumentationScope, AutoCloseable { + extends BaseSpanContext { final T enclosingScope; - @Getter - final long startNanoTime; - @Getter - private Span currentSpan; - @Getter - private final S rootInstrumentationScope; - @Getter - Throwable observedExceptionToIncludeInMetrics; - - protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder attributesBuilder, - AttributeKey key, Optional value) { - return value.map(v -> attributesBuilder.put(key, v)).orElse(attributesBuilder); - } protected BaseNestedSpanContext(S rootScope, T enclosingScope) { + super(rootScope); rootScope.onContextCreated(this); this.enclosingScope = enclosingScope; - this.startNanoTime = System.nanoTime(); - this.rootInstrumentationScope = rootScope; - } - - @Override - 
public void endSpan() { - IScopedInstrumentationAttributes.super.endSpan(); - rootInstrumentationScope.onContextClosed(this); } @Override - public IInstrumentationAttributes getEnclosingScope() { + public IScopedInstrumentationAttributes getEnclosingScope() { return enclosingScope; } @@ -50,27 +29,4 @@ public T getImmediateEnclosingScope() { return enclosingScope; } - protected void initializeSpan() { - initializeSpan(Attributes.builder()); - } - - protected void initializeSpan(AttributesBuilder attributesBuilder) { - initializeSpan(null, attributesBuilder); - } - - protected void initializeSpan(Stream linkedSpans, AttributesBuilder attributesBuilder) { - initializeSpan(rootInstrumentationScope.buildSpan(this, getActivityName(), - linkedSpans, attributesBuilder)); - } - - public void initializeSpan(@NonNull Span s) { - assert currentSpan == null : "only expect to set the current span once"; - currentSpan = s; - } - - @Override - public void addException(Throwable e) { - IScopedInstrumentationAttributes.super.addException(e); - observedExceptionToIncludeInMetrics = e; - } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java new file mode 100644 index 000000000..8564814f4 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java @@ -0,0 +1,73 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.Span; +import lombok.Getter; +import lombok.NonNull; + +import java.util.Optional; +import java.util.stream.Stream; + +public abstract class BaseSpanContext + implements IScopedInstrumentationAttributes, IWithStartTimeAndAttributes, IHasRootInstrumentationScope, AutoCloseable { + @Getter + protected final S rootInstrumentationScope; + @Getter + final long startNanoTime; + @Getter + Throwable observedExceptionToIncludeInMetrics; + @Getter + private Span currentSpan; + + public BaseSpanContext(S rootScope) { + this.startNanoTime = System.nanoTime(); + this.rootInstrumentationScope = rootScope; + } + + protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder attributesBuilder, + AttributeKey key, Optional value) { + return value.map(v -> attributesBuilder.put(key, v)).orElse(attributesBuilder); + } + + @Override + public void endSpan() { + IScopedInstrumentationAttributes.super.endSpan(); + rootInstrumentationScope.onContextClosed(this); + } + + protected void initializeSpan() { + initializeSpanWithLinkedSpans(null); + } + + protected void initializeSpanWithLinkedSpans(Stream linkedSpans) { + initializeSpan(rootInstrumentationScope.buildSpan(this, getActivityName(), linkedSpans)); + } + + public void initializeSpan(@NonNull Span s) { + assert currentSpan == null : "only expect to set the current span once"; + currentSpan = s; + } + + @Override + public void addException(Throwable e) { + IScopedInstrumentationAttributes.super.addException(e); + observedExceptionToIncludeInMetrics = e; + } + + public long getStartNanoTime() { + return this.startNanoTime; + } + + public Span getCurrentSpan() { + return this.currentSpan; + } + + public S getRootInstrumentationScope() { + return this.rootInstrumentationScope; + } + + public Throwable getObservedExceptionToIncludeInMetrics() { + return this.observedExceptionToIncludeInMetrics; + } +} diff 
--git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonMetricInstruments.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonMetricInstruments.java new file mode 100644 index 000000000..a93777a9b --- /dev/null +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonMetricInstruments.java @@ -0,0 +1,13 @@ +package org.opensearch.migrations.tracing; + +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.Meter; + +public class CommonMetricInstruments { + final LongCounter exceptionCounter; + + public CommonMetricInstruments(Meter meter, String activityName) { + exceptionCounter = meter + .counterBuilder(activityName + "ExceptionCount").build(); + } +} diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java index ec614e56a..957f17fee 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java @@ -11,9 +11,8 @@ @Slf4j -public class CommonScopedMetricInstruments { +public class CommonScopedMetricInstruments extends CommonMetricInstruments { final LongCounter contextCounter; - final LongCounter exceptionCounter; final DoubleHistogram contextDuration; public CommonScopedMetricInstruments(Meter meter, String activityName) { @@ -27,22 +26,30 @@ public CommonScopedMetricInstruments(Meter meter, String activityName, private static List getBuckets(String activityName, double firstBucketSize, double lastBucketCeiling) { + var buckets = getExponentialBucketsBetween(firstBucketSize, lastBucketCeiling, 2.0); + log.atInfo().setMessage(() -> "Setting buckets for " + activityName + " to " + + buckets.stream().map(x -> "" + x).collect(Collectors.joining(",", "[", "]"))).log(); + return buckets; + } + + private static List getExponentialBucketsBetween(double firstBucketSize, double lastBucketCeiling) { + return getExponentialBucketsBetween(firstBucketSize, lastBucketCeiling, 2.0); + } + + private static List getExponentialBucketsBetween(double firstBucketSize, double lastBucketCeiling, + double rate) { double[] bucketBoundary = new double[]{firstBucketSize}; - var buckets = DoubleStream.generate(() -> { + return DoubleStream.generate(() -> { var tmp = bucketBoundary[0]; - bucketBoundary[0] *= 2.0; + bucketBoundary[0] *= rate; return tmp; }).takeWhile(v -> v <= lastBucketCeiling).boxed().collect(Collectors.toList()); - log.atInfo().setMessage(() -> "Setting buckets for " + activityName + " to " + - buckets.stream().map(x -> "" + x).collect(Collectors.joining(",", "[", "]"))).log(); - return buckets; } public CommonScopedMetricInstruments(Meter meter, String activityName, List buckets) { + super(meter, activityName); contextCounter = meter .counterBuilder(activityName + "Count").build(); - exceptionCounter = meter - .counterBuilder(activityName + "ExceptionCount").build(); var durationBuilder = meter .histogramBuilder(activityName + "Duration") .setUnit("ms"); diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java index 6a2376548..5d6c59a6e 100644 
--- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/DirectNestedSpanContext.java @@ -1,7 +1,7 @@ package org.opensearch.migrations.tracing; public abstract class DirectNestedSpanContext, + T extends IScopedInstrumentationAttributes & IHasRootInstrumentationScope, L> extends BaseNestedSpanContext implements IWithTypedEnclosingScope diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java index 82e193190..4be0a3b65 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentConstructor.java @@ -7,8 +7,7 @@ import java.util.stream.Stream; public interface IInstrumentConstructor { - @NonNull Span buildSpan(IInstrumentationAttributes forScope, String spanName, Stream linkedSpans, - AttributesBuilder attributesBuilder); + @NonNull Span buildSpan(IScopedInstrumentationAttributes forScope, String spanName, Stream linkedSpans); /** * For debugging, this will be overridden to track creation and termination of spans diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java index 297340284..e89570036 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java @@ -19,7 +19,7 @@ public interface IInstrumentationAttributes { IInstrumentationAttributes getEnclosingScope(); - default Span getCurrentSpan() { return null; } + CommonMetricInstruments getMetrics(); Throwable getObservedExceptionToIncludeInMetrics(); @@ -28,29 +28,8 @@ public interface IInstrumentationAttributes { return e == null ? 
attributesBuilder.build() : attributesBuilder.put(HAD_EXCEPTION_KEY, true).build(); } - default Attributes getPopulatedSpanAttributes() { - return getPopulatedSpanAttributesBuilder().build(); - } - - default AttributesBuilder getPopulatedSpanAttributesBuilder() { - var currentObj = this; - // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts - var stack = new ArrayDeque(); - while (currentObj != null) { - stack.addFirst(currentObj); - currentObj = currentObj.getEnclosingScope(); - } - var builder = stack.stream() - .collect(Utils.foldLeft(Attributes.builder(), (b, iia)->iia.fillAttributesForSpansBelow(b))); - return fillExtraAttributesForThisSpan(builder); - } - - default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { - return builder; - } - - default AttributesBuilder fillExtraAttributesForThisSpan(AttributesBuilder builder) { - return builder; + default void addException(Throwable e) { + meterIncrementEvent(getMetrics().exceptionCounter); } default void meterIncrementEvent(LongCounter c) { @@ -62,9 +41,7 @@ default void meterIncrementEvent(LongCounter c, long increment) { } default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { - try (var scope = new NullableExemplarScope(getCurrentSpan())) { - c.add(increment, getPopulatedMetricAttributes(attributesBuilder)); - } + c.add(increment, getPopulatedMetricAttributes(attributesBuilder)); } default void meterDeltaEvent(LongUpDownCounter c, long delta) { @@ -72,10 +49,8 @@ default void meterDeltaEvent(LongUpDownCounter c, long delta) { } default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { - try (var scope = new NullableExemplarScope(getCurrentSpan())) { - var attributes = getPopulatedMetricAttributes(attributesBuilder); - c.add(delta, attributes); - } + var attributes = getPopulatedMetricAttributes(attributesBuilder); + c.add(delta, attributes); } default void meterHistogramMillis(DoubleHistogram histogram, Duration value) { @@ -91,9 +66,7 @@ default void meterHistogram(DoubleHistogram histogram, double value) { } default void meterHistogram(DoubleHistogram histogram, double value, AttributesBuilder attributesBuilder) { - try (var scope = new NullableExemplarScope(getCurrentSpan())) { - histogram.record(value, getPopulatedMetricAttributes(attributesBuilder)); - } + histogram.record(value, getPopulatedMetricAttributes(attributesBuilder)); } default void meterHistogram(LongHistogram histogram, long value) { @@ -101,9 +74,7 @@ default void meterHistogram(LongHistogram histogram, long value) { } default void meterHistogram(LongHistogram histogram, long value, AttributesBuilder attributesBuilder) { - try (var scope = new NullableExemplarScope(getCurrentSpan())) { - histogram.record(value, getPopulatedMetricAttributes(attributesBuilder)); - } + histogram.record(value, getPopulatedMetricAttributes(attributesBuilder)); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java index dec7c095f..44f33423e 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java @@ -1,9 +1,16 @@ package 
org.opensearch.migrations.tracing; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.trace.Span; import lombok.NonNull; +import org.opensearch.migrations.Utils; + +import java.util.ArrayDeque; public interface IScopedInstrumentationAttributes extends IWithStartTimeAndAttributes, AutoCloseable { @@ -11,10 +18,40 @@ public interface IScopedInstrumentationAttributes String getActivityName(); @Override - @NonNull Span getCurrentSpan(); + IScopedInstrumentationAttributes getEnclosingScope(); + @Override CommonScopedMetricInstruments getMetrics(); + @NonNull Span getCurrentSpan(); + + default Attributes getPopulatedSpanAttributes() { + return getPopulatedSpanAttributesBuilder().build(); + } + + default AttributesBuilder getPopulatedSpanAttributesBuilder() { + IInstrumentationAttributes currentObj = this; + // reverse the order so that the lowest attribute scopes will overwrite the upper ones if there were conflicts + var stack = new ArrayDeque(); + while (currentObj != null) { + if (currentObj instanceof IScopedInstrumentationAttributes) { + stack.addFirst((IScopedInstrumentationAttributes) currentObj); + } + currentObj = currentObj.getEnclosingScope(); + } + var builder = stack.stream() + .collect(Utils.foldLeft(Attributes.builder(), (b, iia)->iia.fillAttributesForSpansBelow(b))); + return fillExtraAttributesForThisSpan(builder); + } + + default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { + return builder; + } + + default AttributesBuilder fillExtraAttributesForThisSpan(AttributesBuilder builder) { + return builder; + } + default LongCounter getEndOfScopeCountMetric() { return getMetrics().contextCounter; } @@ -24,7 +61,9 @@ default DoubleHistogram getEndOfScopeDurationMetric() { } default void endSpan() { - getCurrentSpan().end(); + var span = getCurrentSpan(); + span.setAllAttributes(getPopulatedSpanAttributes()); + span.end(); } default void sendMeterEventsForEnd() { @@ -37,12 +76,37 @@ default void close() { sendMeterEventsForEnd(); } + @Override default void addException(Throwable e) { + IWithStartTimeAndAttributes.super.addException(e); getCurrentSpan().recordException(e); - sendMeterEventsForException(e); } - default void sendMeterEventsForException(Throwable e) { - meterIncrementEvent(getMetrics().exceptionCounter); + @Override + default void meterIncrementEvent(LongCounter c, long increment, AttributesBuilder attributesBuilder) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + IWithStartTimeAndAttributes.super.meterIncrementEvent(c, increment, attributesBuilder); + } + } + + @Override + default void meterDeltaEvent(LongUpDownCounter c, long delta, AttributesBuilder attributesBuilder) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + IWithStartTimeAndAttributes.super.meterDeltaEvent(c, delta, attributesBuilder); + } + } + + @Override + default void meterHistogram(DoubleHistogram histogram, double value, AttributesBuilder attributesBuilder) { + try (var scope = new NullableExemplarScope(getCurrentSpan())) { + IWithStartTimeAndAttributes.super.meterHistogram(histogram, value, attributesBuilder); + } + } + + @Override + default void meterHistogram(LongHistogram histogram, long value, AttributesBuilder attributesBuilder) { + try (var 
scope = new NullableExemplarScope(getCurrentSpan())) { + IWithStartTimeAndAttributes.super.meterHistogram(histogram, value, attributesBuilder); + } } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 2019bb9c1..320693fe3 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -1,8 +1,7 @@ package org.opensearch.migrations.tracing; import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanBuilder; @@ -33,6 +32,8 @@ public class RootOtelContext implements IRootOtelContext { private final OpenTelemetry openTelemetryImpl; private final String scopeName; + @Getter + private final MetricInstruments metrics; public static OpenTelemetry initializeOpenTelemetryForCollector(@NonNull String collectorEndpoint, @NonNull String serviceName) { @@ -96,6 +97,12 @@ public static OpenTelemetry initializeNoopOpenTelemetry() { }); } + public static class MetricInstruments extends CommonMetricInstruments { + public MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); + } + } + public RootOtelContext(String scopeName) { this(scopeName, null); } @@ -107,6 +114,7 @@ public RootOtelContext(String scopeName, String collectorEndpoint, String servic public RootOtelContext(String scopeName, OpenTelemetry sdk) { openTelemetryImpl = sdk != null ? sdk : initializeOpenTelemetryWithCollectorOrAsNoop(null, null); this.scopeName = scopeName; + metrics = new MetricInstruments(this.getMeterProvider().get(scopeName), "root"); } @Override @@ -134,21 +142,20 @@ private static SpanBuilder addLinkedToBuilder(Stream linkedSpanContexts, S .orElse(spanBuilder); } - private static Span buildSpanWithParent(SpanBuilder builder, Attributes attrs, Span parentSpan, - Stream linkedSpanContexts) { + private static Span buildSpanWithParent(SpanBuilder builder, Span parentSpan, Stream linkedSpanContexts) { return addLinkedToBuilder(linkedSpanContexts, Optional.ofNullable(parentSpan) .map(p -> builder.setParent(Context.current().with(p))) .orElseGet(builder::setNoParent)) - .startSpan().setAllAttributes(attrs); + .startSpan(); } @Override - public @NonNull Span buildSpan(IInstrumentationAttributes forScope, - String spanName, Stream linkedSpans, AttributesBuilder attributesBuilder) { + public @NonNull Span buildSpan(IScopedInstrumentationAttributes forScope, String spanName, Stream linkedSpans) { assert forScope.getCurrentSpan() == null; - var parentSpan = forScope.getEnclosingScope().getCurrentSpan(); + var forEnclosingScope = forScope.getEnclosingScope(); + var parentSpan = forEnclosingScope == null ? 
null : forEnclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); - return buildSpanWithParent(spanBuilder, forScope.getPopulatedSpanAttributes(), parentSpan, linkedSpans); + return buildSpanWithParent(spanBuilder, parentSpan, linkedSpans); } } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java index 6072eb346..48a02873a 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/commoncontexts/IConnectionContext.java @@ -13,7 +13,7 @@ public interface IConnectionContext extends IScopedInstrumentationAttributes { String getNodeId(); @Override - default IInstrumentationAttributes getEnclosingScope() { return null; } + default IScopedInstrumentationAttributes getEnclosingScope() { return null; } @Override default AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) { diff --git a/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java index d5fdd268e..45afa2769 100644 --- a/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java +++ b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/IInstrumentationAttributesTest.java @@ -12,9 +12,9 @@ class IInstrumentationAttributesTest { private static final AttributeKey OVERRIDE_KEY = AttributeKey.stringKey("overrideKey"); private static final AttributeKey UNIQUE_KEY = AttributeKey.stringKey("uniqueKey"); - private static class AContext extends BaseNestedSpanContext{ - protected AContext(RootOtelContext rootScope, RootOtelContext enclosingScope) { - super(rootScope, enclosingScope); + private static class AContext extends BaseSpanContext { + protected AContext(RootOtelContext rootScope) { + super(rootScope); } @Override @@ -22,6 +22,11 @@ public String getActivityName() { return "A"; } + @Override + public IScopedInstrumentationAttributes getEnclosingScope() { + return null; + } + @Override public CommonScopedMetricInstruments getMetrics() { return null; @@ -60,7 +65,7 @@ public AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) @Test public void getPopulatedAttributesAreOverrideCorrectly() { var rootCtx = new RootOtelContext("test"); - var aCtx = new AContext(rootCtx, rootCtx); + var aCtx = new AContext(rootCtx); var bCtx = new BContext(rootCtx, aCtx); Optional.ofNullable(aCtx.getPopulatedSpanAttributes()).ifPresent(attrs-> { diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java index 9e18fc251..af2be2a0c 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/RootCaptureContext.java @@ -1,18 +1,25 @@ package 
org.opensearch.migrations.trafficcapture.proxyserver; import io.opentelemetry.api.OpenTelemetry; +import lombok.Getter; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.IRootKafkaOffloaderContext; import org.opensearch.migrations.trafficcapture.kafkaoffloader.tracing.KafkaRecordContext; import org.opensearch.migrations.trafficcapture.netty.tracing.RootWireLoggingContext; +import org.opensearch.migrations.trafficcapture.netty.tracing.WireCaptureContexts; public class RootCaptureContext extends RootWireLoggingContext implements IRootKafkaOffloaderContext { - public RootCaptureContext(OpenTelemetry capture) { - super(capture); + + public static final String SCOPE_NAME = "captureProxy"; + @Getter + public final KafkaRecordContext.MetricInstruments kafkaOffloadingInstruments; + + public RootCaptureContext(OpenTelemetry openTelemetry) { + this(openTelemetry, SCOPE_NAME); } - @Override - public KafkaRecordContext.MetricInstruments getKafkaOffloadingInstruments() { - var meter = getMeterProvider().get("captureProxy"); - return KafkaRecordContext.makeMetrics(meter); + public RootCaptureContext(OpenTelemetry openTelemetry, String scopeName) { + super(openTelemetry, scopeName); + var meter = this.getMeterProvider().get(scopeName); + kafkaOffloadingInstruments = KafkaRecordContext.makeMetrics(meter); } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java index f7572a3e1..983f98611 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDicts.java @@ -97,8 +97,6 @@ public ParsedHttpMessagesAsDicts(IReplayContexts.ITupleHandlingContext context, public static void fillStatusCodeMetrics(@NonNull IReplayContexts.ITupleHandlingContext context, Optional> sourceResponseOp, Optional> targetResponseOp) { - sourceResponseOp.ifPresent(r -> context.setMethod((String) r.get("Method"))); - sourceResponseOp.ifPresent(r -> context.setEndpoint((String) r.get("Request-URI"))); sourceResponseOp.ifPresent(r -> context.setSourceStatus((Integer) r.get(STATUS_CODE_KEY))); targetResponseOp.ifPresent(r -> context.setTargetStatus((Integer) r.get(STATUS_CODE_KEY))); } @@ -150,7 +148,7 @@ private static Map convertRequest(@NonNull IReplayContexts.ITupl map.put("HTTP-Version", message.protocolVersion().toString()); context.setMethod(message.method().toString()); context.setEndpoint(message.uri()); - context.setHttpVersions(message.protocolVersion().toString()); + context.setHttpVersion(message.protocolVersion().toString()); return fillMap(map, message.headers(), message.content()); }); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index d16ab1902..16fb156cb 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -27,7 +27,7 @@ private ActivityNames() {} public static final String REQUEST_SENDING = "requestSending"; public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; public static final 
String RECEIVING_RESPONSE = "receivingResponse"; - public static final String TUPLE_HANDLING = "finalizingResults"; + public static final String TUPLE_COMPARISON = "comparingResults"; } public static class MetricNames { @@ -56,7 +56,7 @@ private MetricNames() {} public static final String ACTIVE_TARGET_CONNECTIONS = "activeTargetConnections"; public static final String BYTES_WRITTEN_TO_TARGET = "bytesWrittenToTarget"; public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget"; - public static final String STATUS_MATCH = "statusMatch"; + public static final String TUPLE_COMPARISON = "tupleComparison"; } public interface IAccumulationScope extends IScopedInstrumentationAttributes { @@ -318,7 +318,7 @@ default String getActivityName() { public interface ITupleHandlingContext extends IAccumulationScope, IWithTypedEnclosingScope { - String ACTIVITY_NAME = ActivityNames.TUPLE_HANDLING; + String ACTIVITY_NAME = ActivityNames.TUPLE_COMPARISON; AttributeKey SOURCE_STATUS_CODE_KEY = AttributeKey.longKey("sourceStatusCode"); AttributeKey TARGET_STATUS_CODE_KEY = AttributeKey.longKey("targetStatusCode"); AttributeKey STATUS_CODE_MATCH_KEY = AttributeKey.booleanKey("statusCodesMatch"); @@ -339,7 +339,7 @@ default String getActivityName() { void setEndpoint(String endpointUrl); - void setHttpVersions(String string); + void setHttpVersion(String string); default UniqueReplayerRequestKey getReplayerRequestKey() { return getLogicalEnclosingScope().getReplayerRequestKey(); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index 34916f722..dc8b7a6bd 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -8,9 +8,11 @@ import lombok.Setter; import org.apache.kafka.common.TopicPartition; import org.opensearch.migrations.tracing.BaseNestedSpanContext; +import org.opensearch.migrations.tracing.CommonMetricInstruments; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.util.Collection; @@ -31,12 +33,13 @@ public AsyncListeningContext(@NonNull RootReplayerContext enclosingScope) { this.enclosingScope = enclosingScope; } - public static class MetricInstruments { + public static class MetricInstruments extends CommonMetricInstruments { public final LongCounter kafkaPartitionsRevokedCounter; public final LongCounter kafkaPartitionsAssignedCounter; public final LongUpDownCounter kafkaActivePartitionsCounter; private MetricInstruments(Meter meter) { + super(meter, "asyncKafkaProcessing"); kafkaPartitionsRevokedCounter = meter .counterBuilder(IKafkaConsumerContexts.MetricNames.PARTITIONS_REVOKED_EVENT_COUNT).build(); kafkaPartitionsAssignedCounter = meter @@ -50,7 +53,7 @@ private MetricInstruments(Meter meter) { return new MetricInstruments(meter); } - private @NonNull MetricInstruments getMetrics() { + @NonNull public MetricInstruments getMetrics() { return enclosingScope.asyncListeningInstruments; } @@ -99,7 +102,7 @@ public TouchScopeContext(@NonNull 
TrafficSourceContexts.BackPressureBlockContext } public static class PollScopeContext - extends BaseNestedSpanContext + extends BaseNestedSpanContext implements IKafkaConsumerContexts.IPollScopeContext { public static class MetricInstruments extends CommonScopedMetricInstruments { private MetricInstruments(Meter meter, String activityName) { @@ -117,14 +120,14 @@ private MetricInstruments(Meter meter, String activityName) { } public PollScopeContext(@NonNull RootReplayerContext rootScope, - @NonNull IInstrumentationAttributes enclosingScope) { + @NonNull IScopedInstrumentationAttributes enclosingScope) { super(rootScope, enclosingScope); initializeSpan(); } } public static class CommitScopeContext - extends BaseNestedSpanContext + extends BaseNestedSpanContext implements IKafkaConsumerContexts.ICommitScopeContext { @Override @@ -148,7 +151,7 @@ private MetricInstruments(Meter meter, String activityName) { } public CommitScopeContext(@NonNull RootReplayerContext rootScope, - @NonNull IInstrumentationAttributes enclosingScope) { + @NonNull IScopedInstrumentationAttributes enclosingScope) { super(rootScope, enclosingScope); initializeSpan(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index f7a376d8a..af677b228 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.replay.tracing; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; import io.opentelemetry.api.metrics.DoubleHistogram; @@ -16,6 +17,7 @@ import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.time.Duration; import java.time.Instant; @@ -27,7 +29,7 @@ public abstract class ReplayContexts extends IReplayContexts { public static final String BYTES_UNIT_STR = "bytes"; public static class ChannelKeyContext - extends BaseNestedSpanContext + extends BaseNestedSpanContext implements IReplayContexts.IChannelKeyContext { @Getter final ISourceTrafficChannelKey channelKey; @@ -35,7 +37,7 @@ public static class ChannelKeyContext public ChannelKeyContext(RootReplayerContext rootScope, IInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { - super(rootScope, enclosingScope); + super(rootScope, null); this.channelKey = channelKey; initializeSpan(); meterDeltaEvent(getMetrics().activeChannelCounter, 1); @@ -126,12 +128,12 @@ public String getRecordId() { } public static class TrafficStreamLifecycleContext - extends BaseNestedSpanContext + extends BaseNestedSpanContext implements IReplayContexts.ITrafficStreamsLifecycleContext { private final ITrafficStreamKey trafficStreamKey; protected TrafficStreamLifecycleContext(RootReplayerContext rootScope, - IInstrumentationAttributes enclosingScope, + IScopedInstrumentationAttributes enclosingScope, ITrafficStreamKey trafficStreamKey) { super(rootScope, enclosingScope); this.trafficStreamKey = trafficStreamKey; @@ -639,24 +641,18 @@ public 
static class TupleHandlingContext Integer sourceStatus; Integer targetStatus; String method; - String httpVersion; public TupleHandlingContext(HttpTransactionContext enclosingScope) { super(enclosingScope); initializeSpan(); } - @Override - public void close() { - super.close(); - } - public static class MetricInstruments extends CommonScopedMetricInstruments { private final LongCounter resultCounter; private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); - resultCounter = meter.counterBuilder("tupleResult").build(); + resultCounter = meter.counterBuilder(MetricNames.TUPLE_COMPARISON).build(); } } @@ -668,20 +664,29 @@ private MetricInstruments(Meter meter, String activityName) { return getRootInstrumentationScope().tupleHandlingInstruments; } - @Override - public void sendMeterEventsForEnd() { - super.sendMeterEventsForEnd(); + static final AttributeKey TARGET_STATUS_CODE_ATTR = AttributeKey.longKey("targetStatusCode"); + + public AttributesBuilder getSharedAttributes(AttributesBuilder attributesBuilder) { final var sourceOp = Optional.ofNullable(sourceStatus); final var targetOp = Optional.ofNullable(targetStatus); final boolean didMatch = sourceOp.flatMap(ss -> targetOp.map(ss::equals)).orElse(false); - AttributesBuilder attributesBuilderForAggregate = - addAttributeIfPresent(addAttributeIfPresent(addAttributeIfPresent( - Attributes.builder(), - METHOD_KEY, Optional.ofNullable(method)), - SOURCE_STATUS_CODE_KEY, sourceOp.map(TupleHandlingContext::categorizeStatus)), - TARGET_STATUS_CODE_KEY, targetOp.map(TupleHandlingContext::categorizeStatus)) - .put(STATUS_CODE_MATCH_KEY, didMatch); + return addAttributeIfPresent(addAttributeIfPresent(addAttributeIfPresent( + attributesBuilder, + METHOD_KEY, Optional.ofNullable(method)), + SOURCE_STATUS_CODE_KEY, sourceOp.map(TupleHandlingContext::categorizeStatus)), + TARGET_STATUS_CODE_KEY, targetOp.map(TupleHandlingContext::categorizeStatus)) + .put(STATUS_CODE_MATCH_KEY, didMatch); + } + @Override + public AttributesBuilder fillExtraAttributesForThisSpan(AttributesBuilder builder) { + return getSharedAttributes(super.fillExtraAttributesForThisSpan(builder)); + } + + @Override + public void sendMeterEventsForEnd() { + super.sendMeterEventsForEnd(); + AttributesBuilder attributesBuilderForAggregate = getSharedAttributes(Attributes.builder()); getCurrentSpan().setAllAttributes(attributesBuilderForAggregate.build()); meterIncrementEvent(getMetrics().resultCounter, 1, attributesBuilderForAggregate); } @@ -696,13 +701,23 @@ public static long categorizeStatus(int status) { return (status / 100L) * 100L; } + /** + * Like httpVersion, Endpoint doesn't have a field because it isn't used as an attribute for metrics + * (it would create too much cardinality pressure). So just drop an attribute into a span instead of + * stashing it for both the span and final metric. + */ @Override public void setEndpoint(String endpointUrl) { getCurrentSpan().setAttribute(ENDPOINT_KEY, endpointUrl); } + /** + * Like Endpoint, httpVersion doesn't have a field because it isn't used as an attribute for metrics + * (it just isn't expected to be super-useful and could create too much cardinality pressure). + * So just drop an attribute into a span instead of stashing it for both the span and final metric. 
+ */ @Override - public void setHttpVersions(String httpVersion) { + public void setHttpVersion(String httpVersion) { getCurrentSpan().setAttribute(HTTP_VERSION_KEY, httpVersion); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 5b8671fd5..4f38bc4b3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -66,15 +66,15 @@ public RootReplayerContext(OpenTelemetry sdk) { @Override public TrafficSourceContexts.ReadChunkContext createReadChunkContext() { - return new TrafficSourceContexts.ReadChunkContext(this, this); + return new TrafficSourceContexts.ReadChunkContext(this, null); } public IReplayContexts.IChannelKeyContext createChannelContext(ISourceTrafficChannelKey tsk) { - return new ReplayContexts.ChannelKeyContext(this, this, tsk); + return new ReplayContexts.ChannelKeyContext(this, null, tsk); } public IKafkaConsumerContexts.ICommitScopeContext createCommitContext() { - return new KafkaConsumerContexts.CommitScopeContext(this, this); + return new KafkaConsumerContexts.CommitScopeContext(this, null); } public IReplayContexts.ITrafficStreamsLifecycleContext diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java index 8fb8751b7..7f6663f21 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/TrafficSourceContexts.java @@ -5,13 +5,14 @@ import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.IInstrumentationAttributes; +import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; public class TrafficSourceContexts { private TrafficSourceContexts() {} public static class ReadChunkContext - extends BaseNestedSpanContext + extends BaseNestedSpanContext implements ITrafficSourceContexts.IReadChunkContext { @Override @@ -43,7 +44,7 @@ private MetricInstruments(Meter meter, String activityName) { return getRootInstrumentationScope().readChunkInstruments; } - public ReadChunkContext(RootReplayerContext rootScope, IInstrumentationAttributes enclosingScope) { + public ReadChunkContext(RootReplayerContext rootScope, IScopedInstrumentationAttributes enclosingScope) { super(rootScope, enclosingScope); initializeSpan(); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java index 08e8efa78..2be9a9e69 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -133,7 +133,7 @@ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSp 
chk.accept(numRequests, "targetTransaction"); chk.accept(numRequests*2, "scheduled"); chk.accept(numRequests, "requestSending"); - chk.accept(numRequests, "finalizingResults"); + chk.accept(numRequests, "comparingResults"); Consumer chkNonZero = k-> { Assertions.assertNotNull(byName.get(k)); From 9a312106e9cb349333c05eac510253d84928d40a Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 5 Feb 2024 23:35:08 -0500 Subject: [PATCH 85/94] README documentation for the Instrumentation + some cleanup. Signed-off-by: Greg Schohn --- .../tracing/KafkaRecordContext.java | 10 +-- TrafficCapture/coreUtilities/README.md | 89 +++++++++++++++++++ .../migrations/tracing/BaseSpanContext.java | 2 +- 3 files changed, 91 insertions(+), 10 deletions(-) create mode 100644 TrafficCapture/coreUtilities/README.md diff --git a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java index 1ef4309fb..98233036f 100644 --- a/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/main/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/KafkaRecordContext.java @@ -23,16 +23,14 @@ public class KafkaRecordContext extends public final String topic; @Getter public final String recordId; - @Getter - public final int recordSize; public KafkaRecordContext(IRootKafkaOffloaderContext rootScope, IConnectionContext enclosingScope, String topic, String recordId, int recordSize) { super(rootScope, enclosingScope); this.topic = topic; this.recordId = recordId; - this.recordSize = recordSize; initializeSpan(); + getCurrentSpan().setAttribute(RECORD_SIZE_ATTR, recordSize); } public static class MetricInstruments extends CommonScopedMetricInstruments { @@ -59,10 +57,4 @@ public AttributesBuilder fillAttributesForSpansBelow(AttributesBuilder builder) .put(TOPIC_ATTR, getTopic()) .put(RECORD_ID_ATTR, getRecordId()); } - - @Override - public AttributesBuilder fillExtraAttributesForThisSpan(AttributesBuilder builder) { - return super.fillExtraAttributesForThisSpan(builder) - .put(RECORD_SIZE_ATTR, getRecordSize()); - } } diff --git a/TrafficCapture/coreUtilities/README.md b/TrafficCapture/coreUtilities/README.md new file mode 100644 index 000000000..3581db355 --- /dev/null +++ b/TrafficCapture/coreUtilities/README.md @@ -0,0 +1,89 @@ +# Traffic Capture/Replay Instrumentation + +This "coreUtilities" package contains common classes and interfaces used to facilitate instrumentation for metrics and +traces. + +## Approach + +The package takes a hard dependency on OpenTelemetry ('otel'). OpenTelemetry provides a unified interface to a +variety of tracing and metering systems. From that unified interface, metric instruments and traces (or "spans") can +be sent to a variety of metric and tracing platforms, including Prometheus, Jaeger, and cloud native solutions like +Amazon CloudWatch and AWS X-Ray. +[RootOtelContext](src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java) acts as a factory for metric +instruments and trace spans. 
As it is currently implemented, both metrics and spans are exported via +[OTLP](https://opentelemetry.io/docs/specs/otel/protocol/) to an +[OTEL Collector](https://opentelemetry.io/docs/collector/) that proxies the instrumentation through further processing +and into downstream systems via exporters. + +It would be redundant to try to make another generic library, so the goal of this package is not to make it easier to +switch instrumentation platforms, but to make instrumentation fit with the TrafficCapture packages more naturally. As +a gradle project, this dependency is exposed as an "api" dependency so that other consumer packages will automatically +pick up the dependency as if it were their own dependency. + +Some of the OpenTelemetry patterns don't work naturally for this asynchronous code with dependency injection. +Specifically, try-with-resources and the use of stack frames to determine contexts are more trouble than they're worth. +Similarly, using statics to store meter values makes the code more rigid and can make testing in parallel more difficult. + +This library adapts the parts of OpenTelemetry to make it more natural and more foolproof throughout the rest of the +TrafficCapture packages. This package introduces the concept of "Contexts" to build and manage all tracing and metering +instrumentation. + +Just as the otel metering and tracing can be efficiently disabled by not configuring them, this library provides some +future-proofing by defining interfaces to track attributes, activities, exceptions, etc., but through descriptive +interfaces where callers describe which actions they're performing, preventing the code from becoming overly complex. + +The goals of the instrumentation package are to + +1. make instrumentation classes easy to use. +2. make it easy to create new instrumentation classes that are safe and easy to use. +3. be efficient enough to use in most cases and flexible enough to tune in cases where the cost is too high. + +The third point is still a work in progress as the exact performance penalty isn't understood yet. However, work for +point #2 dovetails into #3. As context creations are chained together, a no-op uber-context can be created with zero +memory footprint and minimal CPU penalty. The first two points are accomplished by putting contextual information +alongside other data as first-class parameters and fields. For example, where a method might require an identifier, +a context might be passed instead so that the function can retrieve identifying information via the context AND have +the ability to instrument activity within the appropriate context. + +## Class Structure: Contexts + +All metering and tracing activity within the TrafficCapture packages occurs via "Contexts", which are implementations of +either [IInstrumentationAttributes](src/main/java/org/opensearch/migrations/tracing/IInstrumentationAttributes.java) or +its extension, +[IScopedInstrumentationAttributes](src/main/java/org/opensearch/migrations/tracing/IScopedInstrumentationAttributes.java). +IInstrumentationAttributes allows callers to meter activities into counters and histograms via +[otel instruments](https://opentelemetry.io/docs/concepts/signals/metrics/#metric-instruments). Callers need not know +any specific metric structures in order to add activities. Instead, contexts expose progress APIs that fit the +components that they're designed to work with.
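To make the shape of such a progress API concrete, here is a minimal sketch of the calling pattern. The `FileUploadContext` class and its `onBytes*` methods are hypothetical illustrations and are not part of this package; only `addException()` and `close()` come from `IScopedInstrumentationAttributes`.

```java
// Hypothetical component reporting progress through a context rather than through
// raw OpenTelemetry instruments.  The context decides which counters, histograms,
// and span attributes each call maps onto.
void uploadFile(FileUploadContext ctx, byte[] contents) throws Exception {
    try {
        ctx.onBytesQueued(contents.length);  // hypothetical, domain-specific progress call
        send(contents);                      // the component's real work (not shown here)
        ctx.onBytesSent(contents.length);    // hypothetical, domain-specific progress call
    } catch (Exception e) {
        ctx.addException(e);                 // records the exception on the context's span
        throw e;
    } finally {
        ctx.close();                         // ends the span and emits end-of-scope count/duration metrics
    }
}
```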
+ +For example, the +[RequestTransformationContext](../trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java) +class tracks network activity and performance of the http transformation code. That class manages how those +interactions and values are converted to otel instrumentation. That frees application code from implementation details, +makes the application cleaner, and allows all instrumentation concerns to be consolidated. + +IScopedInstrumentationAttributes extensions also provide (auto) instrumentation to indicate when the activities that +they represent began and ended. That information includes the duration of the activity represented by the context, +along with a count of the occurrences of the activity. In addition to those metrics, spans are also created and +emitted as the context is closed. + +The base Attributes interfaces (IInstrumentationAttributes and IScopedInstrumentationAttributes) provide functions to +fill in attributes that are specific to metrics and, independently, specific to spans. Metric values are aggregated, +and the more unique attribute combinations there are for each time bucket, the greater the stress on the time-series +database. However, varied attributes can, in some circumstances, be worth the price of extra space and processing time. +Consider, for example, the metrics that show status code differences between the source and target clusters. + +In addition to those baseline features, some Context classes (that extend the Attributes interfaces) are capable of +creating child contexts that have a parent relationship with the creating Context. + +## OpenTelemetry Specifics + +While metric instruments can be emitted without any span context (after all, the two systems receiving those values are +unrelated), emitting metrics from within a [Scope](https://opentelemetry.io/docs/concepts/instrumentation-scope/) allows +metrics to be linked to [exemplar](https://opentelemetry.io/docs/specs/otel/metrics/data-model/#exemplars) +spans. When Prometheus is used as a metrics data sink, as it is configured in the dockerSolution +(with '--enable-feature=exemplar-storage'), exemplars can be rendered in the same graph as the general data points. + +Since exact values can't be stored within a metrics data store but we still need to render percentiles of +those results, OpenTelemetry uses bucketed histograms. The Contexts will automatically convert a numerical value (or +will calculate the number of milliseconds from the time that the Context was created) into a histogram. \ No newline at end of file diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java index 8564814f4..04b048030 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java @@ -59,7 +59,7 @@ public long getStartNanoTime() { return this.startNanoTime; } - public Span getCurrentSpan() { + public @NonNull Span getCurrentSpan() { return this.currentSpan; } From 805e13b9b59566e1b645bb61da33b6ec0ed72d27 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 6 Feb 2024 12:47:09 -0500 Subject: [PATCH 86/94] When the first bucket size is <=0 for the CommonScopedMetricInstrument constructor override, throw an IllegalArgumentException.
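In sketch form (mirroring the new unit test below; "meter" stands in for any otel Meter instance):

    new CommonScopedMetricInstruments(meter, "testActivity", 0, 2);   // now throws IllegalArgumentException
    new CommonScopedMetricInstruments(meter, "testActivity", -2, 2);  // now throws IllegalArgumentException
    new CommonScopedMetricInstruments(meter, "testActivity", 1, 8);   // still constructs normally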
Signed-off-by: Greg Schohn --- .../CommonScopedMetricInstruments.java | 3 +++ .../CommonScopedMetricInstrumentsTest.java | 20 +++++++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/CommonScopedMetricInstrumentsTest.java diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java index 957f17fee..836362b86 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java @@ -38,6 +38,9 @@ private static List getExponentialBucketsBetween(double firstBucketSize, private static List getExponentialBucketsBetween(double firstBucketSize, double lastBucketCeiling, double rate) { + if (firstBucketSize <= 0) { + throw new IllegalArgumentException("firstBucketSize value " + firstBucketSize + " must be > 0"); + } double[] bucketBoundary = new double[]{firstBucketSize}; return DoubleStream.generate(() -> { var tmp = bucketBoundary[0]; diff --git a/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/CommonScopedMetricInstrumentsTest.java b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/CommonScopedMetricInstrumentsTest.java new file mode 100644 index 000000000..3f375b2c7 --- /dev/null +++ b/TrafficCapture/coreUtilities/src/test/java/org/opensearch/migrations/tracing/CommonScopedMetricInstrumentsTest.java @@ -0,0 +1,20 @@ +package org.opensearch.migrations.tracing; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class CommonScopedMetricInstrumentsTest { + @Test + public void testThatBadSizeThrowsException() { + Assertions.assertThrows(IllegalArgumentException.class, () -> + new CommonScopedMetricInstruments(null, "testActivity", 0, 2)); + Assertions.assertThrows(IllegalArgumentException.class, () -> + new CommonScopedMetricInstruments(null, "testActivity", -2, 2)); + var otelSdkBundle = new InMemoryInstrumentationBundle(false, false); + Assertions.assertDoesNotThrow(() -> + new CommonScopedMetricInstruments(otelSdkBundle.getOpenTelemetrySdk().getMeter(""), + "testActivity", 1, 8)); + } +} \ No newline at end of file From 9aa3432bbc6121f60a12506c8b10c6b12a5ee11a Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 6 Feb 2024 23:28:57 -0500 Subject: [PATCH 87/94] Add tracing and metrics for replayer sockets as they're created and closed. 
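In sketch form, the client connection pool now reports socket lifetimes through the channel's context (the two calls below are the methods added by this change; the surrounding pool code is abbreviated):

    // when a new replay session/socket is created for a channel
    channelKeyCtx.onSocketConnectionCreated();  // starts a "tcpConnection" span and bumps the opened/active connection counters

    // when the channel is finally torn down
    channelKeyCtx.onSocketConnectionClosed();   // ends that span, counts the close, and decrements the active-connection counter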
Signed-off-by: Greg Schohn --- .../composeExtensions/otel-aws-debug.yml | 18 +++++ .../docker/composeExtensions/otel-aws.yml | 4 +- .../main/docker/migrationConsole/Dockerfile | 2 + .../simpleDocumentGenerator.py | 63 ++++++++++++++++++ .../replay/ClientConnectionPool.java | 65 ++++++++----------- .../migrations/replay/ReplayEngine.java | 8 --- .../replay/RequestSenderOrchestrator.java | 2 +- .../replay/tracing/IReplayContexts.java | 8 +++ .../replay/tracing/ReplayContexts.java | 64 +++++++++++++++++- .../replay/tracing/RootReplayerContext.java | 2 + .../FullReplayerWithTracingChecksTest.java | 3 +- .../replay/ParsedHttpMessagesAsDictsTest.java | 2 +- .../replay/ResultsToLogsConsumerTest.java | 2 +- .../NettyPacketToHttpConsumerTest.java | 23 +++++-- .../replay/tracing/TracingTest.java | 2 + .../tracing/InstrumentationTest.java | 4 +- 16 files changed, 211 insertions(+), 61 deletions(-) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws-debug.yml create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws-debug.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws-debug.yml new file mode 100644 index 000000000..c1035379a --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws-debug.yml @@ -0,0 +1,18 @@ +version: '3.7' +services: + + otel-collector: + image: migrations/otel_collector:latest + command: ["--config=/etc/otel-config-aws-debug.yaml", "${OTELCOL_ARGS}"] + networks: + - migrations + ports: + - "13133:13133" # health_check extension + - "4317:4317" # otlp receiver + volumes: + - ~/.aws:/home/aoc/.aws + environment: + - ANALYTICS_DOMAIN_ENDPOINT=opensearchanalytics # use the local container for compose here + - AWS_REGION=us-east-2 + - AWS_DEFAULT_REGION=us-east-2 + - AWS_PROFILE=default \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml index 18366e059..40f776eca 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-aws.yml @@ -13,6 +13,6 @@ services: - ~/.aws:/home/aoc/.aws environment: - ANALYTICS_DOMAIN_ENDPOINT=opensearchanalytics # use the local container for compose here - - AWS_REGION=us-east-1 - - AWS_DEFAULT_REGION=us-east-1 + - AWS_REGION=us-east-2 + - AWS_DEFAULT_REGION=us-east-2 - AWS_PROFILE=default \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile index 69186c587..2b6fd2d42 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile @@ -11,6 +11,7 @@ RUN mkdir /root/kafka-tools RUN mkdir /root/kafka-tools/aws COPY runTestBenchmarks.sh /root/ COPY humanReadableLogs.py /root/ +COPY simpleDocumentGenerator.py /root/ COPY catIndices.sh /root/ COPY showFetchMigrationCommand.sh /root/ COPY setupIntegTests.sh /root/ @@ -18,6 +19,7 @@ COPY msk-iam-auth.properties /root/kafka-tools/aws COPY kafkaCmdRef.md /root/kafka-tools RUN chmod ug+x /root/runTestBenchmarks.sh RUN chmod ug+x /root/humanReadableLogs.py +RUN chmod ug+x 
/root/simpleDocumentGenerator.py RUN chmod ug+x /root/catIndices.sh RUN chmod ug+x /root/showFetchMigrationCommand.sh WORKDIR /root/kafka-tools diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py new file mode 100644 index 000000000..6a9df4240 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +import requests +import time +import argparse +from datetime import datetime + +# url_base="http://test.elb.us-west-2.amazonaws.com:9200" +username='admin' +password='admin' + +# Function to get current date in a specific format for indexing +def get_current_date_index(): + return datetime.now().strftime("%Y-%m-%d") + +# Function to send a request +def send_request(index, counter, url_base): + url = f"{url_base}/{index}/_doc/{counter}" + timestamp = datetime.now().isoformat() + # Basic Authentication + auth = (username, password) + payload = { + "timestamp": timestamp, + "new_field": "apple" + } + + try: + #response = requests.put(url, json=payload, auth=auth) + response = requests.put(url, auth=auth, json=payload, verify=False) + print(response.text) + print(f"Request sent at {timestamp}: {response.status_code}") + return response.status_code + except requests.RequestException as e: + print(f"Error sending request: {e}") + return None + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--endpoint", help="Source cluster endpoint e.g. http://test.elb.us-west-2.amazonaws.com:9200.") + return parser.parse_args() + +args = parse_args() +# Main loop +counter = 1 +total2xxCount = 0 +total4xxCount = 0 +total5xxCount = 0 +totalErrorCount = 0 +while True: + current_index = get_current_date_index() + response_code = send_request(current_index, counter, args.endpoint) + if (response_code is not None): + first_digit = int(str(response_code)[:1]) + if (first_digit == 2): + total2xxCount += 1 + elif (first_digit == 4): + total4xxCount += 1 + elif (first_digit == 5): + total5xxCount += 1 + else: + totalErrorCount += 1 + print(f"Summary: 2xx responses = {total2xxCount}, 4xx responses = {total4xxCount}, 5xx responses = {total5xxCount}, Error requests = {totalErrorCount}") + counter += 1 + time.sleep(0.1) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index 4fdbae4fe..a34861380 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -17,26 +17,35 @@ import org.opensearch.migrations.replay.datahandlers.NettyPacketToHttpConsumer; import org.opensearch.migrations.replay.datatypes.ConnectionReplaySession; import org.opensearch.migrations.replay.tracing.IReplayContexts; +import org.opensearch.migrations.replay.tracing.ReplayContexts; import org.opensearch.migrations.replay.util.DiagnosticTrackableCompletableFuture; import org.opensearch.migrations.replay.util.StringTrackableCompletableFuture; import java.net.URI; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @Slf4j public class ClientConnectionPool { - private static final 
ContextKey RECORD_ID_KEY = ContextKey.named("recordId"); - public static final String TELEMETRY_SCOPE_NAME = "ClientConnectionPool"; public static final String TARGET_CONNECTION_POOL_NAME = "targetConnectionPool"; private final URI serverUri; private final SslContext sslContext; public final NioEventLoopGroup eventLoopGroup; private final LoadingCache connectionId2ChannelCache; - private final AtomicInteger numConnectionsCreated = new AtomicInteger(0); - private final AtomicInteger numConnectionsClosed = new AtomicInteger(0); + public ConnectionReplaySession buildConnectionReplaySession(final IReplayContexts.IChannelKeyContext channelKeyCtx) { + if (eventLoopGroup.isShuttingDown()) { + throw new IllegalStateException("Event loop group is shutting down. Not creating a new session."); + } + log.trace("creating connection session"); + channelKeyCtx.onSocketConnectionCreated(); + // arguably the most only thing that matters here is associating this item with an + // EventLoop (thread). As the channel needs to be recycled, we'll come back to the + // event loop that was tied to the original channel to bind all future channels to + // the same event loop. That means that we don't have to worry about concurrent + // accesses/changes to the OTHER value that we're storing within the cache. + return new ConnectionReplaySession(eventLoopGroup.next()); + } public ClientConnectionPool(URI serverUri, SslContext sslContext, int numThreads) { this.serverUri = serverUri; @@ -44,22 +53,9 @@ public ClientConnectionPool(URI serverUri, SslContext sslContext, int numThreads this.eventLoopGroup = new NioEventLoopGroup(numThreads, new DefaultThreadFactory(TARGET_CONNECTION_POOL_NAME)); - connectionId2ChannelCache = CacheBuilder.newBuilder().build(new CacheLoader<>() { - @Override - public ConnectionReplaySession load(final String s) { - if (eventLoopGroup.isShuttingDown()) { - throw new IllegalStateException("Event loop group is shutting down. Not creating a new session."); - } - numConnectionsCreated.incrementAndGet(); - log.trace("creating connection session"); - // arguably the most only thing that matters here is associating this item with an - // EventLoop (thread). As the channel needs to be recycled, we'll come back to the - // event loop that was tied to the original channel to bind all future channels to - // the same event loop. That means that we don't have to worry about concurrent - // accesses/changes to the OTHER value that we're storing within the cache. 
- return new ConnectionReplaySession(eventLoopGroup.next()); - } - }); + connectionId2ChannelCache = CacheBuilder.newBuilder().build(CacheLoader.from(key -> { + throw new UnsupportedOperationException("Use Cache.get(key, callable) instead"); + })); } private DiagnosticTrackableCompletableFuture @@ -84,14 +80,6 @@ public ConnectionReplaySession load(final String s) { }); } - public int getNumConnectionsCreated() { - return numConnectionsCreated.get(); - } - - public int getNumConnectionsClosed() { - return numConnectionsClosed.get(); - } - public Future shutdownNow() { connectionId2ChannelCache.invalidateAll(); return eventLoopGroup.shutdownGracefully(); @@ -127,7 +115,8 @@ public DiagnosticTrackableCompletableFuture closeConnectionsAndShu () -> "Final shutdown for " + this.getClass().getSimpleName()); } - public void closeConnection(String connId) { + public void closeConnection(IReplayContexts.IChannelKeyContext ctx) { + var connId = ctx.getConnectionId(); log.atInfo().setMessage(() -> "closing connection for " + connId).log(); var channelsFuture = connectionId2ChannelCache.getIfPresent(connId); if (channelsFuture != null) { @@ -154,15 +143,17 @@ public void closeConnection(String connId) { } @SneakyThrows - public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyContext channelKey, boolean dontCreate) { + public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyContext channelKeyCtx, + boolean dontCreate) { - var crs = dontCreate ? connectionId2ChannelCache.getIfPresent(channelKey.getConnectionId()) : - connectionId2ChannelCache.get(channelKey.getConnectionId()); + var crs = dontCreate ? connectionId2ChannelCache.getIfPresent(channelKeyCtx.getConnectionId()) : + connectionId2ChannelCache.get(channelKeyCtx.getConnectionId(), + () -> buildConnectionReplaySession(channelKeyCtx)); if (crs != null) { - crs.setChannelContext(channelKey); + crs.setChannelContext(channelKeyCtx); } - log.atTrace().setMessage(()->"returning ReplaySession=" + crs + " for " + channelKey.getConnectionId() + - " from " + channelKey).log(); + log.atTrace().setMessage(()->"returning ReplaySession=" + crs + " for " + channelKeyCtx.getConnectionId() + + " from " + channelKeyCtx).log(); return crs; } @@ -172,7 +163,7 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"Waiting for closeFuture() on channel"); - numConnectionsClosed.incrementAndGet(); + channelAndFutureWork.getChannelContext().onSocketConnectionClosed(); channelAndFutureWork.getChannelFutureFuture().map(cff->cff .thenAccept(cf-> { cf.channel().close() diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java index d6f802b3a..9e69ad7c8 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ReplayEngine.java @@ -168,14 +168,6 @@ public DiagnosticTrackableCompletableFuture closeConnectionsAndShu return networkSendOrchestrator.clientConnectionPool.closeConnectionsAndShutdown(); } - public int getNumConnectionsCreated() { - return networkSendOrchestrator.clientConnectionPool.getNumConnectionsCreated(); - } - - public int getNumConnectionsClosed() { - return networkSendOrchestrator.clientConnectionPool.getNumConnectionsClosed(); - } - public void 
setFirstTimestamp(Instant firstPacketTimestamp) { timeShifter.setFirstTimestamp(firstPacketTimestamp); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java index d8e068837..9639b1897 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/RequestSenderOrchestrator.java @@ -93,7 +93,7 @@ public StringTrackableCompletableFuture scheduleClose(IReplayContexts.ICha channelFutureAndRequestSchedule, finalTunneledResponse, timestamp, new ChannelTask(ChannelTaskType.CLOSE, () -> { log.trace("Closing client connection " + channelInteraction); - clientConnectionPool.closeConnection(channelKey.getConnectionId()); + clientConnectionPool.closeConnection(ctx); finalTunneledResponse.future.complete(null); }))); return finalTunneledResponse; diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 16fb156cb..15aa57ade 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -16,6 +16,7 @@ public static class ActivityNames { private ActivityNames() {} public static final String CHANNEL = "channel"; + public static final String TCP_CONNECTION = "tcpConnection"; public static final String RECORD_LIFETIME = "recordLifetime"; public static final String TRAFFIC_STREAM_LIFETIME = "trafficStreamLifetime"; public static final String HTTP_TRANSACTION = "httpTransaction"; @@ -53,7 +54,10 @@ private MetricNames() {} public static final String TRANSFORM_CHUNKS_OUT = "transformChunksOut"; public static final String NETTY_SCHEDULE_LAG = "scheduleLag"; public static final String SOURCE_TO_TARGET_REQUEST_LAG = "lagBetweenSourceAndTargetRequests"; + public static final String ACTIVE_CHANNELS_YET_TO_BE_FULLY_DISCARDED = "activeReplayerChannels"; public static final String ACTIVE_TARGET_CONNECTIONS = "activeTargetConnections"; + public static final String CONNECTIONS_OPENED = "connectionsOpened"; + public static final String CONNECTIONS_CLOSED = "connectionsClosedCount"; public static final String BYTES_WRITTEN_TO_TARGET = "bytesWrittenToTarget"; public static final String BYTES_READ_FROM_TARGET = "bytesReadFromTarget"; public static final String TUPLE_COMPARISON = "tupleComparison"; @@ -83,6 +87,10 @@ default String getConnectionId() { default String getNodeId() { return getChannelKey().getNodeId(); } + + void onSocketConnectionCreated(); + + void onSocketConnectionClosed(); } public interface IKafkaRecordContext diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index af677b228..efebfc1ad 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -28,16 +28,50 @@ public abstract class ReplayContexts 
extends IReplayContexts { public static final String COUNT_UNIT_STR = "count"; public static final String BYTES_UNIT_STR = "bytes"; + public static class SocketContext + extends BaseNestedSpanContext + implements IScopedInstrumentationAttributes { + + public static final String ACTIVITY_NAME = ActivityNames.TCP_CONNECTION; + + protected SocketContext(RootReplayerContext rootScope, ChannelKeyContext enclosingScope) { + super(rootScope, enclosingScope); + initializeSpan(); + } + + @Override + public String getActivityName() { + return ACTIVITY_NAME; + } + + public static class MetricInstruments extends CommonScopedMetricInstruments { + private MetricInstruments(Meter meter, String activityName) { + super(meter, activityName); + } + } + + public static MetricInstruments makeMetrics(Meter meter) { + return new MetricInstruments(meter, ACTIVITY_NAME); + } + + @Override + public CommonScopedMetricInstruments getMetrics() { + return getRootInstrumentationScope().socketInstruments; + } + } + public static class ChannelKeyContext extends BaseNestedSpanContext implements IReplayContexts.IChannelKeyContext { @Getter final ISourceTrafficChannelKey channelKey; + SocketContext socketContext; + public ChannelKeyContext(RootReplayerContext rootScope, - IInstrumentationAttributes enclosingScope, + IScopedInstrumentationAttributes enclosingScope, ISourceTrafficChannelKey channelKey) { - super(rootScope, null); + super(rootScope, enclosingScope); this.channelKey = channelKey; initializeSpan(); meterDeltaEvent(getMetrics().activeChannelCounter, 1); @@ -45,11 +79,20 @@ public ChannelKeyContext(RootReplayerContext rootScope, public static class MetricInstruments extends CommonScopedMetricInstruments { final LongUpDownCounter activeChannelCounter; + final LongUpDownCounter activeSocketConnectionsCounter; + final LongCounter channelCreatedCounter; + final LongCounter channelClosedCounter; private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); activeChannelCounter = meter + .upDownCounterBuilder(MetricNames.ACTIVE_CHANNELS_YET_TO_BE_FULLY_DISCARDED).build(); + activeSocketConnectionsCounter = meter .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); + channelCreatedCounter = meter + .counterBuilder(MetricNames.CONNECTIONS_OPENED).build(); + channelClosedCounter = meter + .counterBuilder(MetricNames.CONNECTIONS_CLOSED).build(); } } @@ -71,6 +114,23 @@ public void sendMeterEventsForEnd() { super.sendMeterEventsForEnd(); meterDeltaEvent(getMetrics().activeChannelCounter, -1); } + + @Override + public void onSocketConnectionCreated() { + assert socketContext == null; + socketContext = new SocketContext(rootInstrumentationScope, this); + meterIncrementEvent(getMetrics().channelCreatedCounter); + meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, 1); + } + + @Override + public void onSocketConnectionClosed() { + assert socketContext != null; + socketContext.close(); + socketContext = null; + meterIncrementEvent(getMetrics().channelClosedCounter); + meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, -1); + } } public static class KafkaRecordContext diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java index 4f38bc4b3..8fe51cacf 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java +++ 
b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/RootReplayerContext.java @@ -34,6 +34,7 @@ public class RootReplayerContext extends RootOtelContext implements IRootReplaye public final ReplayContexts.WaitingForHttpResponseContext.MetricInstruments waitingForHttpResponseInstruments; public final ReplayContexts.ReceivingHttpResponseContext.MetricInstruments receivingHttpInstruments; public final ReplayContexts.TupleHandlingContext.MetricInstruments tupleHandlingInstruments; + public final ReplayContexts.SocketContext.MetricInstruments socketInstruments; public RootReplayerContext(OpenTelemetry sdk) { super(SCOPE_NAME, sdk); @@ -50,6 +51,7 @@ public RootReplayerContext(OpenTelemetry sdk) { waitForNextSignalInstruments = TrafficSourceContexts.WaitForNextSignal.makeMetrics(meter); channelKeyInstruments = ReplayContexts.ChannelKeyContext.makeMetrics(meter); + socketInstruments = ReplayContexts.SocketContext.makeMetrics(meter); kafkaRecordInstruments = ReplayContexts.KafkaRecordContext.makeMetrics(meter); trafficStreamLifecycleInstruments = ReplayContexts.TrafficStreamLifecycleContext.makeMetrics(meter); httpTransactionInstruments = ReplayContexts.HttpTransactionContext.makeMetrics(meter); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java index 2be9a9e69..8b5a2c57a 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -35,7 +35,7 @@ @WrapWithNettyLeakDetection(disableLeakChecks = true) public class FullReplayerWithTracingChecksTest extends FullTrafficReplayerTest { - protected TestContext makeContext() { return TestContext.withAllTracking(); } + protected TestContext makeInstrumentationContext() { return TestContext.withAllTracking(); } @Test public void testSingleStreamWithCloseIsCommitted() throws Throwable { @@ -125,6 +125,7 @@ private void checkSpansForSimpleReplayedTransactions(InMemorySpanExporter testSp byName.remove(k); }; chk.accept(1,"channel"); + chk.accept(1,"tcpConnection"); chk.accept(1, "trafficStreamLifetime"); chk.accept(numRequests, "httpTransaction"); chk.accept(numRequests, "accumulatingRequest"); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java index b71fabd32..056e33a64 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ParsedHttpMessagesAsDictsTest.java @@ -13,7 +13,7 @@ ParsedHttpMessagesAsDicts makeTestData() { } @Override - protected TestContext makeContext() { + protected TestContext makeInstrumentationContext() { return TestContext.withTracking(false, true); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 164ec5573..3ff102293 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -38,7 +38,7 @@ class ResultsToLogsConsumerTest extends InstrumentationTest { public static final String TEST_EXCEPTION_MESSAGE = "TEST_EXCEPTION"; @Override - protected TestContext makeContext() { + protected TestContext makeInstrumentationContext() { return TestContext.withTracking(false, true); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index 3f9118b12..a7be99523 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -27,6 +27,7 @@ import org.opensearch.migrations.testutils.SimpleHttpServer; import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import org.opensearch.migrations.tracing.InstrumentationTest; +import org.opensearch.migrations.tracing.TestContext; import javax.net.ssl.SSLException; import java.io.IOException; @@ -75,8 +76,8 @@ public class NettyPacketToHttpConsumerTest extends InstrumentationTest { @BeforeAll public static void setupTestServer() throws PortFinder.ExceededMaxPortAssigmentAttemptException { testServers = Map.of( - false, SimpleHttpServer.makeServer(false, NettyPacketToHttpConsumerTest::makeContext), - true, SimpleHttpServer.makeServer(true, NettyPacketToHttpConsumerTest::makeContext)); + false, SimpleHttpServer.makeServer(false, NettyPacketToHttpConsumerTest::makeResponseContext), + true, SimpleHttpServer.makeServer(true, NettyPacketToHttpConsumerTest::makeResponseContext)); } @AfterAll @@ -110,7 +111,7 @@ private SimpleHttpResponse makeTestRequestViaClient(SimpleHttpClientForTesting c "User-Agent", "UnitTest").entrySet().stream()); } - private static SimpleHttpResponse makeContext(HttpFirstLine request) { + private static SimpleHttpResponse makeResponseContext(HttpFirstLine request) { var headers = Map.of( "Content-Type", "text/plain", "Funtime", "checkIt!", @@ -146,6 +147,7 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except @ValueSource(booleans = {false, true}) public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) throws SSLException, ExecutionException, InterruptedException { + var trackingContext = TestContext.withTracking(false, true); var testServer = testServers.get(useTls); var sslContext = !testServer.localhostEndpoint().getScheme().equalsIgnoreCase("https") ? 
null : SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); @@ -159,7 +161,7 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) new TestFlowController(), timeShifter); for (int j = 0; j < 2; ++j) { for (int i = 0; i < 2; ++i) { - var ctx = rootContext.getTestConnectionRequestContext("TEST_" + i, j); + var ctx = trackingContext.getTestConnectionRequestContext("TEST_" + i, j); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, sendingFactory, ctx, Instant.now(), Instant.now(), () -> Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); @@ -178,8 +180,17 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) var stopFuture = sendingFactory.closeConnectionsAndShutdown(); log.info("waiting for factory to shutdown: " + stopFuture); stopFuture.get(); - Assertions.assertEquals(2, sendingFactory.getNumConnectionsCreated()); - Assertions.assertEquals(2, sendingFactory.getNumConnectionsClosed()); + Thread.sleep(200); // let metrics settle down + var allMetricData = trackingContext.inMemoryInstrumentationBundle.testMetricExporter.getFinishedMetricItems(); + long tcpOpenConnectionCount = allMetricData.stream().filter(md->md.getName().startsWith("tcpConnectionCount")) + .reduce((a,b)->b).get().getLongSumData().getPoints().stream().reduce((a,b)->b).get().getValue(); + long connectionsOpenedCount = allMetricData.stream().filter(md->md.getName().startsWith("connectionsOpened")) + .reduce((a,b)->b).get().getLongSumData().getPoints().stream().reduce((a,b)->b).get().getValue(); + long connectionsClosedCount = allMetricData.stream().filter(md->md.getName().startsWith("connectionsClosed")) + .reduce((a,b)->b).get().getLongSumData().getPoints().stream().reduce((a,b)->b).get().getValue(); + Assertions.assertEquals(2, tcpOpenConnectionCount); + Assertions.assertEquals(2, connectionsOpenedCount); + Assertions.assertEquals(2, connectionsClosedCount); } private static String normalizeMessage(String s) { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index ffd504ba8..f5954363e 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -49,6 +49,8 @@ public void tracingWorks() { } } } + channelCtx.onSocketConnectionCreated(); + channelCtx.onSocketConnectionClosed(); } var recordedSpans = rootContext.inMemoryInstrumentationBundle.testSpanExporter.getFinishedSpanItems(); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java index a1f0f686d..826ada529 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/InstrumentationTest.java @@ -7,13 +7,13 @@ public class InstrumentationTest { protected TestContext rootContext; - protected TestContext makeContext() { + protected TestContext makeInstrumentationContext() { return TestContext.noOtelTracking(); } @BeforeEach protected void initializeContext() { - rootContext 
= makeContext(); + rootContext = makeInstrumentationContext(); } @AfterEach From 48a45ab0eb5d090be3d7fc4ac1c9952fa55c8117 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 7 Feb 2024 12:40:32 -0500 Subject: [PATCH 88/94] Bugfix, test fix, lint fix. Bugfix is in the capture proxy's channel context's `sendMeterEventsForEnd()` override to call super so that we'll pickup duration, etc metrics too. The lint fix is in a new python script to facilitate testing from the migration console. The test fix is to give each TrafficReplayer run a fresh TestContext. That context includes channel contexts, which should not be reused across process boundaries and likewise shouldn't be getting reused if we're trying to simulate that for repeated runs. Signed-off-by: Greg Schohn --- .../tracing/ConnectionContext.java | 1 + .../main/docker/migrationConsole/Dockerfile | 1 + .../simpleDocumentGenerator.py | 19 +++++++++--- .../netty/tracing/IWireCaptureContexts.java | 2 ++ .../FullReplayerWithTracingChecksTest.java | 14 ++++----- .../replay/FullTrafficReplayerTest.java | 30 ++++++++++--------- .../KafkaRestartingTrafficReplayerTest.java | 7 +++-- .../replay/TrafficReplayerRunner.java | 9 ++++-- 8 files changed, 52 insertions(+), 31 deletions(-) diff --git a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java index 4fdfb5776..d4ce9906c 100644 --- a/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java +++ b/TrafficCapture/captureOffloader/src/main/java/org/opensearch/migrations/trafficcapture/tracing/ConnectionContext.java @@ -51,6 +51,7 @@ protected MetricInstruments(Meter meter, String activityName) { @Override public void sendMeterEventsForEnd() { + super.sendMeterEventsForEnd(); meterDeltaEvent(getMetrics().activeConnectionsCounter, -1); } } diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile index 2b6fd2d42..26e9c7c4d 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile @@ -17,6 +17,7 @@ COPY showFetchMigrationCommand.sh /root/ COPY setupIntegTests.sh /root/ COPY msk-iam-auth.properties /root/kafka-tools/aws COPY kafkaCmdRef.md /root/kafka-tools +RUN ln -s /usr/bin/python3 /usr/bin/python RUN chmod ug+x /root/runTestBenchmarks.sh RUN chmod ug+x /root/humanReadableLogs.py RUN chmod ug+x /root/simpleDocumentGenerator.py diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py index 6a9df4240..e22ceece5 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/simpleDocumentGenerator.py @@ -5,13 +5,20 @@ from datetime import datetime # url_base="http://test.elb.us-west-2.amazonaws.com:9200" -username='admin' -password='admin' +username = 'admin' +password = 'admin' + +session = requests.Session() +keep_alive_headers = { + 'Connection': 'keep-alive' +} + # Function to get current date in a specific format for indexing def get_current_date_index(): return datetime.now().strftime("%Y-%m-%d") + # 
Function to send a request def send_request(index, counter, url_base): url = f"{url_base}/{index}/_doc/{counter}" @@ -24,8 +31,9 @@ def send_request(index, counter, url_base): } try: + # a new connection for every request #response = requests.put(url, json=payload, auth=auth) - response = requests.put(url, auth=auth, json=payload, verify=False) + response = session.put(url, json=payload, auth=auth, headers=keep_alive_headers, verify=False) print(response.text) print(f"Request sent at {timestamp}: {response.status_code}") return response.status_code @@ -33,11 +41,13 @@ def send_request(index, counter, url_base): print(f"Error sending request: {e}") return None + def parse_args(): parser = argparse.ArgumentParser() parser.add_argument("--endpoint", help="Source cluster endpoint e.g. http://test.elb.us-west-2.amazonaws.com:9200.") return parser.parse_args() + args = parse_args() # Main loop counter = 1 @@ -58,6 +68,7 @@ def parse_args(): total5xxCount += 1 else: totalErrorCount += 1 - print(f"Summary: 2xx responses = {total2xxCount}, 4xx responses = {total4xxCount}, 5xx responses = {total5xxCount}, Error requests = {totalErrorCount}") + print(f"Summary: 2xx responses = {total2xxCount}, 4xx responses = {total4xxCount}, " + f"5xx responses = {total5xxCount}, Error requests = {totalErrorCount}") counter += 1 time.sleep(0.1) diff --git a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java index 26d9b4e08..8ac05baa6 100644 --- a/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java +++ b/TrafficCapture/nettyWireLogging/src/main/java/org/opensearch/migrations/trafficcapture/netty/tracing/IWireCaptureContexts.java @@ -7,6 +7,7 @@ public abstract class IWireCaptureContexts { public static class ActivityNames { + private ActivityNames() {} public static final String BLOCKED = "blocked"; public static final String GATHERING_REQUEST = "gatheringRequest"; public static final String WAITING_FOR_RESPONSE = "waitingForResponse"; @@ -14,6 +15,7 @@ public static class ActivityNames { } public static class MetricNames { + private MetricNames() {} public static final String UNREGISTERED = "unregistered"; public static final String REMOVED = "removed"; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java index 8b5a2c57a..2cf75f85f 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -35,8 +35,6 @@ @WrapWithNettyLeakDetection(disableLeakChecks = true) public class FullReplayerWithTracingChecksTest extends FullTrafficReplayerTest { - protected TestContext makeInstrumentationContext() { return TestContext.withAllTracking(); } - @Test public void testSingleStreamWithCloseIsCommitted() throws Throwable { var random = new Random(1); @@ -48,10 +46,11 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { .addSubStream(TrafficObservation.newBuilder() .setClose(CloseObservation.newBuilder().build()).build()) .build(); - var 
trafficSourceSupplier = new FullTrafficReplayerTest.ArrayCursorTrafficSourceFactory(rootContext, - List.of(trafficStreamWithJustClose)); - TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(rootContext, 0, - httpServer.localhostEndpoint(), new FullTrafficReplayerTest.IndexWatchingListenerFactory(), trafficSourceSupplier); + var trafficSourceSupplier = new FullTrafficReplayerTest.ArrayCursorTrafficSourceFactory(List.of(trafficStreamWithJustClose)); + TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(0, + httpServer.localhostEndpoint(), new FullTrafficReplayerTest.IndexWatchingListenerFactory(), + () -> TestContext.noOtelTracking(), + trafficSourceSupplier); Assertions.assertEquals(1, trafficSourceSupplier.nextReadCursor.get()); log.info("done"); } @@ -59,6 +58,7 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { @ParameterizedTest @ValueSource(ints = {1,2}) public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) throws Throwable { + var rootContext = TestContext.withAllTracking(); var random = new Random(1); var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), response->TestHttpServerContext.makeResponse(random, response)); @@ -91,7 +91,7 @@ public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) thro .build(); var trafficSource = new ArrayCursorTrafficCaptureSource(rootContext, - new ArrayCursorTrafficSourceFactory(rootContext, List.of(trafficStream))); + new ArrayCursorTrafficSourceFactory(List.of(trafficStream))); var tr = new TrafficReplayer(rootContext, httpServer.localhostEndpoint(), null, new StaticAuthTransformerFactory("TEST"), null, true, 10, 10 * 1024); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index 88e2c6cde..8b50fc722 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -30,6 +30,7 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -40,7 +41,7 @@ // to the test server, a shutdown will stop those work threads without letting them flush through all of their work // (since that could take a very long time) and some of the work might have been followed by resource releases. 
@WrapWithNettyLeakDetection(disableLeakChecks = true) -public class FullTrafficReplayerTest extends InstrumentationTest { +public class FullTrafficReplayerTest { public static final int INITIAL_STOP_REPLAYER_REQUEST_COUNT = 1; public static final String TEST_NODE_ID = "TestNodeId"; @@ -77,16 +78,19 @@ public void fullTest(int testSize, boolean randomize) throws Throwable { var random = new Random(1); var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(200), response -> TestHttpServerContext.makeResponse(random, response)); - var streamAndConsumer = - TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(rootContext, testSize, randomize); - var numExpectedRequests = streamAndConsumer.numHttpTransactions; - var trafficStreams = streamAndConsumer.stream.collect(Collectors.toList()); + var streamAndSizes = TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(TestContext.noOtelTracking(), + testSize, randomize); + var numExpectedRequests = streamAndSizes.numHttpTransactions; + var trafficStreams = streamAndSizes.stream.collect(Collectors.toList()); log.atInfo().setMessage(() -> trafficStreams.stream().map(ts -> TrafficStreamUtils.summarizeTrafficStream(ts)) .collect(Collectors.joining("\n"))).log(); - var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(rootContext, trafficStreams); - TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(rootContext, numExpectedRequests, - httpServer.localhostEndpoint(), new IndexWatchingListenerFactory(), trafficSourceSupplier); - Assertions.assertEquals(trafficSourceSupplier.trafficStreamsList.size(), trafficSourceSupplier.nextReadCursor.get()); + var trafficSourceSupplier = new ArrayCursorTrafficSourceFactory(trafficStreams); + TrafficReplayerRunner.runReplayerUntilSourceWasExhausted( + numExpectedRequests, httpServer.localhostEndpoint(), new IndexWatchingListenerFactory(), + () -> TestContext.noOtelTracking(), + trafficSourceSupplier); + Assertions.assertEquals(trafficSourceSupplier.trafficStreamsList.size(), + trafficSourceSupplier.nextReadCursor.get()); log.info("done"); } @@ -115,17 +119,15 @@ public int compareTo(TrafficStreamCursorKey other) { } } - protected static class ArrayCursorTrafficSourceFactory implements Supplier { - private final TestContext rootContext; + protected static class ArrayCursorTrafficSourceFactory implements Function { List trafficStreamsList; AtomicInteger nextReadCursor = new AtomicInteger(); - public ArrayCursorTrafficSourceFactory(TestContext rootContext, List trafficStreamsList) { - this.rootContext = rootContext; + public ArrayCursorTrafficSourceFactory(List trafficStreamsList) { this.trafficStreamsList = trafficStreamsList; } - public ISimpleTrafficCaptureSource get() { + public ISimpleTrafficCaptureSource apply(TestContext rootContext) { var rval = new ArrayCursorTrafficCaptureSource(rootContext, this); log.info("trafficSource="+rval+" readCursor="+rval.readCursor.get()+" nextReadCursor="+ nextReadCursor.get()); return rval; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java index ba0d3013b..9dd8164c7 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/KafkaRestartingTrafficReplayerTest.java @@ -89,16 +89,17 @@ 
public void fullTest(int testSize, boolean randomize) throws Throwable { var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), response->TestHttpServerContext.makeResponse(random, response)); var streamAndConsumer = - TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(rootContext, testSize, randomize); + TrafficStreamGenerator.generateStreamAndSumOfItsTransactions(TestContext.noOtelTracking(), testSize, randomize); var trafficStreams = streamAndConsumer.stream.collect(Collectors.toList()); log.atInfo().setMessage(()->trafficStreams.stream().map(TrafficStreamUtils::summarizeTrafficStream) .collect(Collectors.joining("\n"))).log(); loadStreamsToKafka(buildKafkaConsumer(), Streams.concat(trafficStreams.stream(), Stream.of(SENTINEL_TRAFFIC_STREAM))); - TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(rootContext, streamAndConsumer.numHttpTransactions, + TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(streamAndConsumer.numHttpTransactions, httpServer.localhostEndpoint(), new CounterLimitedReceiverFactory(), - () -> new SentinelSensingTrafficSource( + () -> TestContext.noOtelTracking(), + rootContext -> new SentinelSensingTrafficSource( new KafkaTrafficCaptureSource(rootContext, buildKafkaConsumer(), TEST_TOPIC_NAME, Duration.ofMillis(DEFAULT_POLL_INTERVAL_MS)))); log.info("done"); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index b69dd829c..74d9e0920 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -21,6 +21,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -35,9 +36,10 @@ static class FabricatedErrorToKillTheReplayer extends Error { private TrafficReplayerRunner() {} - static void runReplayerUntilSourceWasExhausted(TestContext rootContext, int numExpectedRequests, URI endpoint, + static void runReplayerUntilSourceWasExhausted(int numExpectedRequests, URI endpoint, Supplier> tupleListenerSupplier, - Supplier trafficSourceSupplier) + Supplier rootContextSupplier, + Function trafficSourceFactory) throws Throwable { AtomicInteger runNumberRef = new AtomicInteger(); var totalUniqueEverReceived = new AtomicInteger(); @@ -51,7 +53,8 @@ static void runReplayerUntilSourceWasExhausted(TestContext rootContext, int numE var counter = new AtomicInteger(); var tupleReceiver = tupleListenerSupplier.get(); try { - runTrafficReplayer(rootContext, trafficSourceSupplier, endpoint, (t) -> { + var rootContext = rootContextSupplier.get(); + runTrafficReplayer(rootContext, ()->trafficSourceFactory.apply(rootContext), endpoint, (t) -> { if (runNumber != runNumberRef.get()) { // for an old replayer. I'm not sure why shutdown isn't blocking until all threads are dead, // but that behavior only impacts this test as far as I can tell. From 16a9010793598b4e97eb0a8eeda9ce066bf476a3 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 7 Feb 2024 18:30:52 -0500 Subject: [PATCH 89/94] Fix an edge case where a socketChannel might not have been created even though the channel context was. 
Also make minor fixes to other tests to make them more resilient. Signed-off-by: Greg Schohn --- .../migrations/replay/tracing/ReplayContexts.java | 8 +++++--- .../opensearch/migrations/replay/tracing/TracingTest.java | 3 +-- .../org/opensearch/migrations/tracing/TestContext.java | 5 ++++- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index efebfc1ad..ceccfbe9d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -125,9 +125,11 @@ public void onSocketConnectionCreated() { @Override public void onSocketConnectionClosed() { - assert socketContext != null; - socketContext.close(); - socketContext = null; + if (socketContext != null) { + try (var toClose = socketContext) { + socketContext = null; + } + } meterIncrementEvent(getMetrics().channelClosedCounter); meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, -1); } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index f5954363e..2d37bb892 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -71,8 +71,7 @@ private void checkSpans(List recordedSpans) { try { return f.get(null); } catch (Exception e) { - Lombok.sneakyThrow(e); - return null; + throw Lombok.sneakyThrow(e); } }).toArray(String[]::new); Stream.of(keys).forEach(spanName -> { diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index 145eaf57e..3c7155504 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -18,6 +18,7 @@ public class TestContext extends RootReplayerContext implements AutoCloseable { public final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; public final ContextTracker contextTracker = new ContextTracker(); public final ChannelContextManager channelContextManager = new ChannelContextManager(this); + private final Object channelContextManagerLock = new Object(); public static TestContext withTracking(boolean tracing, boolean metrics) { return new TestContext(new InMemoryInstrumentationBundle(tracing, metrics)); @@ -48,7 +49,9 @@ public void onContextClosed(IScopedInstrumentationAttributes newScopedContext) { } public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficStreamContextForTest(ITrafficStreamKey tsk) { - return createTrafficStreamContextForStreamSource(channelContextManager.retainOrCreateContext(tsk), tsk); + synchronized (channelContextManagerLock) { + return createTrafficStreamContextForStreamSource(channelContextManager.retainOrCreateContext(tsk), tsk); + } } @Override From 4b102f7a0fa9c939e230889c58dffb47423c5dfd Mon Sep 17 00:00:00 
2001 From: Greg Schohn Date: Wed, 7 Feb 2024 23:08:01 -0500 Subject: [PATCH 90/94] Handle SocketContexts as first class contexts rather than trying to implicitly manage them within a ChannelContext. I've also made a couple more test and production data structures more threadsafe. Signed-off-by: Greg Schohn --- .../replay/ClientConnectionPool.java | 10 +-- .../datatypes/ConnectionReplaySession.java | 5 +- .../replay/tracing/ChannelContextManager.java | 4 +- .../replay/tracing/IReplayContexts.java | 11 ++- .../replay/tracing/ReplayContexts.java | 72 ++++++++----------- .../replay/tracing/TracingTest.java | 4 +- 6 files changed, 50 insertions(+), 56 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index a34861380..a1b1f9b1c 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -38,13 +38,12 @@ public ConnectionReplaySession buildConnectionReplaySession(final IReplayContext throw new IllegalStateException("Event loop group is shutting down. Not creating a new session."); } log.trace("creating connection session"); - channelKeyCtx.onSocketConnectionCreated(); // arguably the most only thing that matters here is associating this item with an // EventLoop (thread). As the channel needs to be recycled, we'll come back to the // event loop that was tied to the original channel to bind all future channels to // the same event loop. That means that we don't have to worry about concurrent // accesses/changes to the OTHER value that we're storing within the cache. - return new ConnectionReplaySession(eventLoopGroup.next()); + return new ConnectionReplaySession(eventLoopGroup.next(), channelKeyCtx); } public ClientConnectionPool(URI serverUri, SslContext sslContext, int numThreads) { @@ -149,9 +148,6 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte var crs = dontCreate ? connectionId2ChannelCache.getIfPresent(channelKeyCtx.getConnectionId()) : connectionId2ChannelCache.get(channelKeyCtx.getConnectionId(), () -> buildConnectionReplaySession(channelKeyCtx)); - if (crs != null) { - crs.setChannelContext(channelKeyCtx); - } log.atTrace().setMessage(()->"returning ReplaySession=" + crs + " for " + channelKeyCtx.getConnectionId() + " from " + channelKeyCtx).log(); return crs; @@ -163,7 +159,7 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"Waiting for closeFuture() on channel"); - channelAndFutureWork.getChannelContext().onSocketConnectionClosed(); + channelAndFutureWork.getSocketContext().close(); channelAndFutureWork.getChannelFutureFuture().map(cff->cff .thenAccept(cf-> { cf.channel().close() @@ -177,7 +173,7 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte if (channelAndFutureWork.hasWorkRemaining()) { log.atWarn().setMessage(()->"Work items are still remaining for this connection session" + "(last associated with connection=" + - channelAndFutureWork.getChannelContext() + + channelAndFutureWork.getSocketContext() + "). 
" + channelAndFutureWork.calculateSizeSlowly() + " requests that were enqueued won't be run").log(); } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java index 91f6c8093..557452fba 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/datatypes/ConnectionReplaySession.java @@ -35,12 +35,13 @@ public class ConnectionReplaySession { @Getter @Setter - private IReplayContexts.IChannelKeyContext channelContext; + private final IReplayContexts.ISocketContext socketContext; - public ConnectionReplaySession(EventLoop eventLoop) { + public ConnectionReplaySession(EventLoop eventLoop, IReplayContexts.IChannelKeyContext channelKeyContext) { this.eventLoop = eventLoop; this.scheduleSequencer = new OnlineRadixSorter<>(0); this.schedule = new TimeToResponseFulfillmentFutureMap(); + this.socketContext = channelKeyContext.createSocketContext(); } @SneakyThrows diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java index 90fb8f9d0..eaa902ac3 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ChannelContextManager.java @@ -3,7 +3,7 @@ import lombok.Getter; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; -import java.util.HashMap; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; public class ChannelContextManager implements Function { @@ -40,7 +40,7 @@ boolean release() { } } - HashMap connectionToChannelContextMap = new HashMap<>(); + ConcurrentHashMap connectionToChannelContextMap = new ConcurrentHashMap<>(); public IReplayContexts.IChannelKeyContext apply(ITrafficStreamKey tsk) { return retainOrCreateContext(tsk); diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java index 15aa57ade..4008ea34a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/IReplayContexts.java @@ -88,9 +88,16 @@ default String getNodeId() { return getChannelKey().getNodeId(); } - void onSocketConnectionCreated(); + ISocketContext createSocketContext(); + } + + public interface ISocketContext extends IAccumulationScope, IWithTypedEnclosingScope { + public static final String ACTIVITY_NAME = ActivityNames.TCP_CONNECTION; - void onSocketConnectionClosed(); + @Override + default String getActivityName() { + return ACTIVITY_NAME; + } } public interface IKafkaRecordContext diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index ceccfbe9d..94bee2e3a 100644 --- 
a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -10,52 +10,65 @@ import lombok.Getter; import lombok.NonNull; import lombok.Setter; +import lombok.extern.slf4j.Slf4j; import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.ITrafficStreamKey; import org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; import org.opensearch.migrations.tracing.BaseNestedSpanContext; import org.opensearch.migrations.tracing.CommonScopedMetricInstruments; import org.opensearch.migrations.tracing.DirectNestedSpanContext; -import org.opensearch.migrations.tracing.IInstrumentationAttributes; import org.opensearch.migrations.tracing.IScopedInstrumentationAttributes; import java.time.Duration; import java.time.Instant; import java.util.Optional; +@Slf4j public abstract class ReplayContexts extends IReplayContexts { public static final String COUNT_UNIT_STR = "count"; public static final String BYTES_UNIT_STR = "bytes"; public static class SocketContext - extends BaseNestedSpanContext - implements IScopedInstrumentationAttributes { + extends DirectNestedSpanContext + implements ISocketContext { - public static final String ACTIVITY_NAME = ActivityNames.TCP_CONNECTION; - - protected SocketContext(RootReplayerContext rootScope, ChannelKeyContext enclosingScope) { - super(rootScope, enclosingScope); + protected SocketContext(ChannelKeyContext enclosingScope) { + super(enclosingScope); initializeSpan(); - } - - @Override - public String getActivityName() { - return ACTIVITY_NAME; + meterIncrementEvent(getMetrics().channelCreatedCounter); + meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, 1); } public static class MetricInstruments extends CommonScopedMetricInstruments { + final LongUpDownCounter activeSocketConnectionsCounter; + final LongCounter channelCreatedCounter; + final LongCounter channelClosedCounter; + private MetricInstruments(Meter meter, String activityName) { super(meter, activityName); + activeSocketConnectionsCounter = meter + .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); + channelCreatedCounter = meter + .counterBuilder(MetricNames.CONNECTIONS_OPENED).build(); + channelClosedCounter = meter + .counterBuilder(MetricNames.CONNECTIONS_CLOSED).build(); } } + @Override + public void sendMeterEventsForEnd() { + super.sendMeterEventsForEnd(); + meterIncrementEvent(getMetrics().channelClosedCounter); + meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, -1); + } + public static MetricInstruments makeMetrics(Meter meter) { return new MetricInstruments(meter, ACTIVITY_NAME); } @Override - public CommonScopedMetricInstruments getMetrics() { + public MetricInstruments getMetrics() { return getRootInstrumentationScope().socketInstruments; } } @@ -77,22 +90,18 @@ public ChannelKeyContext(RootReplayerContext rootScope, meterDeltaEvent(getMetrics().activeChannelCounter, 1); } + @Override + public ISocketContext createSocketContext() { + return new SocketContext(this); + } + public static class MetricInstruments extends CommonScopedMetricInstruments { final LongUpDownCounter activeChannelCounter; - final LongUpDownCounter activeSocketConnectionsCounter; - final LongCounter channelCreatedCounter; - final LongCounter channelClosedCounter; private MetricInstruments(Meter meter, String activityName) { super(meter, 
activityName); activeChannelCounter = meter .upDownCounterBuilder(MetricNames.ACTIVE_CHANNELS_YET_TO_BE_FULLY_DISCARDED).build(); - activeSocketConnectionsCounter = meter - .upDownCounterBuilder(MetricNames.ACTIVE_TARGET_CONNECTIONS).build(); - channelCreatedCounter = meter - .counterBuilder(MetricNames.CONNECTIONS_OPENED).build(); - channelClosedCounter = meter - .counterBuilder(MetricNames.CONNECTIONS_CLOSED).build(); } } @@ -114,25 +123,6 @@ public void sendMeterEventsForEnd() { super.sendMeterEventsForEnd(); meterDeltaEvent(getMetrics().activeChannelCounter, -1); } - - @Override - public void onSocketConnectionCreated() { - assert socketContext == null; - socketContext = new SocketContext(rootInstrumentationScope, this); - meterIncrementEvent(getMetrics().channelCreatedCounter); - meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, 1); - } - - @Override - public void onSocketConnectionClosed() { - if (socketContext != null) { - try (var toClose = socketContext) { - socketContext = null; - } - } - meterIncrementEvent(getMetrics().channelClosedCounter); - meterDeltaEvent(getMetrics().activeSocketConnectionsCounter, -1); - } } public static class KafkaRecordContext diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index 2d37bb892..95bc3ffc5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -49,8 +49,8 @@ public void tracingWorks() { } } } - channelCtx.onSocketConnectionCreated(); - channelCtx.onSocketConnectionClosed(); + try (var ctx = channelCtx.createSocketContext()) { + } } var recordedSpans = rootContext.inMemoryInstrumentationBundle.testSpanExporter.getFinishedSpanItems(); From 1cb8927d5b113a7c4a9c1f8f171c482822ca3b19 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 8 Feb 2024 10:20:18 -0500 Subject: [PATCH 91/94] Fix an issue with when to close the SocketContext and some memory leaks in test code. The SocketContext is now closed in the callback for the channel close rather than before we call close(). That should make a test failure due to duplicate close() calls much less likely and should also give us a better idea of when the socket was actually closed. There were some OutOfMemoryErrors coming back from the github action after 10 minutes of testing. I believe that this was due to having a number of InMemoryMetricExporters that periodic metric exporters were pumping to (in perpetuity, even after the test was complete). We were also potentially tracking lots of backtraces in the ContextTrackers. Both of these now have close() calls that clears all of that logged data. That's now called by TestContext.close(), which is wired for each InstrumentationTest. The next commit will tie off a lot more loose ends, but this commit was tested more extensively, hence the reason that I'm keeping them separate. 
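A minimal sketch of that per-test wiring (illustrative only: the teardown method name below is hypothetical, while rootContext, makeInstrumentationContext(), and TestContext.close() are the test-fixture pieces this patch series touches):

    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeEach;

    public class InstrumentationTest {
        protected TestContext rootContext;

        protected TestContext makeInstrumentationContext() {
            return TestContext.noOtelTracking();
        }

        @BeforeEach
        protected void initializeContext() {
            // each test (and each simulated replayer restart) gets its own context
            rootContext = makeInstrumentationContext();
        }

        @AfterEach
        protected void teardownContext() {
            // TestContext.close() closes the ContextTracker (dropping the recorded
            // creation/close call details) and the InMemoryInstrumentationBundle
            // (shutting down the in-memory span and metric exporters), so finished
            // tests stop accumulating telemetry in memory.
            rootContext.close();
        }
    }

Because TestContext implements AutoCloseable, a per-run try-with-resources over the supplied context gives replayer-runner tests the same cleanup guarantee as the fixture above.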
Signed-off-by: Greg Schohn --- .../TestRootKafkaOffloaderContext.java | 3 +- .../InMemoryInstrumentationBundle.java | 67 +++++++++++++++++-- .../replay/ClientConnectionPool.java | 2 +- .../replay/tracing/KafkaConsumerContexts.java | 2 +- .../migrations/tracing/TestContext.java | 4 +- 5 files changed, 68 insertions(+), 10 deletions(-) diff --git a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java index 954a614f0..00029d717 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java +++ b/TrafficCapture/captureKafkaOffloader/src/test/java/org/opensearch/migrations/trafficcapture/kafkaoffloader/tracing/TestRootKafkaOffloaderContext.java @@ -17,8 +17,7 @@ public class TestRootKafkaOffloaderContext extends RootOtelContext implements IR private final InMemoryInstrumentationBundle inMemoryInstrumentationBundle; public static TestRootKafkaOffloaderContext withTracking() { - return new TestRootKafkaOffloaderContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), - InMemoryMetricExporter.create())); + return new TestRootKafkaOffloaderContext(new InMemoryInstrumentationBundle(true, true)); } public static TestRootKafkaOffloaderContext noTracking() { diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java index ef68f62e2..3d1ea5930 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/InMemoryInstrumentationBundle.java @@ -1,30 +1,79 @@ package org.opensearch.migrations.tracing; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.export.MetricExporter; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import lombok.Getter; +import lombok.NonNull; import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; @Getter -public class InMemoryInstrumentationBundle { +public class InMemoryInstrumentationBundle implements AutoCloseable { + + public static class LastMetricsExporter implements MetricExporter { + private final Queue finishedMetricItems = new ConcurrentLinkedQueue<>(); + boolean isStopped; + + public List getFinishedMetricItems() { + return Collections.unmodifiableList(new ArrayList<>(finishedMetricItems)); + } + + 
@Override + public CompletableResultCode export(@NonNull Collection metrics) { + if (isStopped) { + return CompletableResultCode.ofFailure(); + } + finishedMetricItems.clear(); + finishedMetricItems.addAll(metrics); + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + isStopped = true; + return CompletableResultCode.ofSuccess(); + } + + @Override + public AggregationTemporality getAggregationTemporality(@NonNull InstrumentType instrumentType) { + return AggregationTemporality.CUMULATIVE; + } + } + public final OpenTelemetrySdk openTelemetrySdk; public final InMemorySpanExporter testSpanExporter; - public final InMemoryMetricExporter testMetricExporter; + public final LastMetricsExporter testMetricExporter; public InMemoryInstrumentationBundle(boolean collectTraces, boolean collectMetrics) { this(collectTraces ? InMemorySpanExporter.create() : null, - collectMetrics ? InMemoryMetricExporter.create() : null); + collectMetrics ? new LastMetricsExporter() : null); } public InMemoryInstrumentationBundle(InMemorySpanExporter testSpanExporter, - InMemoryMetricExporter testMetricExporter) { + LastMetricsExporter testMetricExporter) { this.testSpanExporter = testSpanExporter; this.testMetricExporter = testMetricExporter; @@ -42,4 +91,14 @@ public InMemoryInstrumentationBundle(InMemorySpanExporter testSpanExporter, } openTelemetrySdk = otelBuilder.build(); } + + @Override + public void close() { + Optional.ofNullable(testMetricExporter).ifPresent(MetricExporter::close); + Optional.ofNullable(testSpanExporter).ifPresent(te -> { + te.close(); + te.reset(); + }); + } + } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java index a1b1f9b1c..2dddd1f2a 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/ClientConnectionPool.java @@ -159,11 +159,11 @@ public ConnectionReplaySession getCachedSession(IReplayContexts.IChannelKeyConte new StringTrackableCompletableFuture(new CompletableFuture<>(), ()->"Waiting for closeFuture() on channel"); - channelAndFutureWork.getSocketContext().close(); channelAndFutureWork.getChannelFutureFuture().map(cff->cff .thenAccept(cf-> { cf.channel().close() .addListener(closeFuture -> { + channelAndFutureWork.getSocketContext().close(); if (closeFuture.isSuccess()) { channelClosedFuture.future.complete(channelAndFutureWork.getInnerChannelFuture().channel()); } else { diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java index dc8b7a6bd..5c777568d 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/KafkaConsumerContexts.java @@ -151,7 +151,7 @@ private MetricInstruments(Meter meter, String activityName) { } public CommitScopeContext(@NonNull RootReplayerContext rootScope, - @NonNull IScopedInstrumentationAttributes enclosingScope) { + 
IScopedInstrumentationAttributes enclosingScope) { super(rootScope, enclosingScope); initializeSpan(); } diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index 3c7155504..0a4b78d52 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -25,8 +25,7 @@ public static TestContext withTracking(boolean tracing, boolean metrics) { } public static TestContext withAllTracking() { - return new TestContext(new InMemoryInstrumentationBundle(InMemorySpanExporter.create(), - InMemoryMetricExporter.create())); + return withTracking(true, true); } public static TestContext noOtelTracking() { @@ -56,6 +55,7 @@ public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficStreamContex @Override public void close() { + inMemoryInstrumentationBundle.close(); // Assertions.assertEquals("", contextTracker.getAllRemainingActiveScopes().entrySet().stream() // .map(kvp->kvp.getKey().toString()).collect(Collectors.joining())); } From 441ca40fa980b0cc430db4136f1087b4e7170d11 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 8 Feb 2024 10:21:17 -0500 Subject: [PATCH 92/94] Tie off more loose ends for memory leaks during test runs. Signed-off-by: Greg Schohn --- .../opensearch/migrations/tracing/ContextTracker.java | 8 +++++++- .../replay/FullReplayerWithTracingChecksTest.java | 8 ++++++-- .../migrations/replay/FullTrafficReplayerTest.java | 2 +- .../migrations/replay/TrafficReplayerRunner.java | 3 +-- .../datahandlers/NettyPacketToHttpConsumerTest.java | 10 +++++++--- .../migrations/replay/tracing/TracingTest.java | 10 ++++++++-- .../org/opensearch/migrations/tracing/TestContext.java | 1 + 7 files changed, 31 insertions(+), 11 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java index d33aeb597..ddb700892 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java @@ -8,7 +8,7 @@ import java.util.stream.Collectors; @Slf4j -public class ContextTracker { +public class ContextTracker implements AutoCloseable { private static class ExceptionForStackTracingOnly extends Exception { } @@ -57,4 +57,10 @@ public Map getAllRemainingActiveS .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } } + + public void close() { + synchronized (lockObject) { + scopedContextToCallDetails.clear(); + } + } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java index 2cf75f85f..74fbb8c49 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullReplayerWithTracingChecksTest.java @@ -35,6 +35,11 @@ @WrapWithNettyLeakDetection(disableLeakChecks = true) public class 
FullReplayerWithTracingChecksTest extends FullTrafficReplayerTest { + @Override + protected TestContext makeInstrumentationContext() { + return TestContext.withAllTracking(); + } + @Test public void testSingleStreamWithCloseIsCommitted() throws Throwable { var random = new Random(1); @@ -49,7 +54,7 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { var trafficSourceSupplier = new FullTrafficReplayerTest.ArrayCursorTrafficSourceFactory(List.of(trafficStreamWithJustClose)); TrafficReplayerRunner.runReplayerUntilSourceWasExhausted(0, httpServer.localhostEndpoint(), new FullTrafficReplayerTest.IndexWatchingListenerFactory(), - () -> TestContext.noOtelTracking(), + () -> TestContext.withAllTracking(), trafficSourceSupplier); Assertions.assertEquals(1, trafficSourceSupplier.nextReadCursor.get()); log.info("done"); @@ -58,7 +63,6 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { @ParameterizedTest @ValueSource(ints = {1,2}) public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) throws Throwable { - var rootContext = TestContext.withAllTracking(); var random = new Random(1); var httpServer = SimpleNettyHttpServer.makeServer(false, Duration.ofMillis(2), response->TestHttpServerContext.makeResponse(random, response)); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java index 8b50fc722..b5bdb7e5c 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/FullTrafficReplayerTest.java @@ -41,7 +41,7 @@ // to the test server, a shutdown will stop those work threads without letting them flush through all of their work // (since that could take a very long time) and some of the work might have been followed by resource releases. @WrapWithNettyLeakDetection(disableLeakChecks = true) -public class FullTrafficReplayerTest { +public class FullTrafficReplayerTest extends InstrumentationTest { public static final int INITIAL_STOP_REPLAYER_REQUEST_COUNT = 1; public static final String TEST_NODE_ID = "TestNodeId"; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java index 74d9e0920..fe8866fb2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerRunner.java @@ -52,8 +52,7 @@ static void runReplayerUntilSourceWasExhausted(int numExpectedRequests, URI endp int runNumber = runNumberRef.get(); var counter = new AtomicInteger(); var tupleReceiver = tupleListenerSupplier.get(); - try { - var rootContext = rootContextSupplier.get(); + try (var rootContext = rootContextSupplier.get()) { runTrafficReplayer(rootContext, ()->trafficSourceFactory.apply(rootContext), endpoint, (t) -> { if (runNumber != runNumberRef.get()) { // for an old replayer. 
I'm not sure why shutdown isn't blocking until all threads are dead, diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index a7be99523..a949db7db 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -91,6 +91,11 @@ public static void tearDownTestServer() throws Exception { }); } + @Override + protected TestContext makeInstrumentationContext() { + return TestContext.withTracking(false, true); + } + @Test public void testThatTestSetupIsCorrect() throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException @@ -147,7 +152,6 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls) throws Except @ValueSource(booleans = {false, true}) public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) throws SSLException, ExecutionException, InterruptedException { - var trackingContext = TestContext.withTracking(false, true); var testServer = testServers.get(useTls); var sslContext = !testServer.localhostEndpoint().getScheme().equalsIgnoreCase("https") ? null : SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); @@ -161,7 +165,7 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) new TestFlowController(), timeShifter); for (int j = 0; j < 2; ++j) { for (int i = 0; i < 2; ++i) { - var ctx = trackingContext.getTestConnectionRequestContext("TEST_" + i, j); + var ctx = rootContext.getTestConnectionRequestContext("TEST_" + i, j); var requestFinishFuture = TrafficReplayer.transformAndSendRequest(transformingHttpHandlerFactory, sendingFactory, ctx, Instant.now(), Instant.now(), () -> Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8))); @@ -181,7 +185,7 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) log.info("waiting for factory to shutdown: " + stopFuture); stopFuture.get(); Thread.sleep(200); // let metrics settle down - var allMetricData = trackingContext.inMemoryInstrumentationBundle.testMetricExporter.getFinishedMetricItems(); + var allMetricData = rootContext.inMemoryInstrumentationBundle.testMetricExporter.getFinishedMetricItems(); long tcpOpenConnectionCount = allMetricData.stream().filter(md->md.getName().startsWith("tcpConnectionCount")) .reduce((a,b)->b).get().getLongSumData().getPoints().stream().reduce((a,b)->b).get().getValue(); long connectionsOpenedCount = allMetricData.stream().filter(md->md.getName().startsWith("connectionsOpened")) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index 95bc3ffc5..17e0300d4 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -8,6 +8,7 @@ import org.opensearch.migrations.replay.datatypes.ISourceTrafficChannelKey; import org.opensearch.migrations.replay.datatypes.PojoTrafficStreamKeyAndContext; import 
org.opensearch.migrations.replay.datatypes.UniqueReplayerRequestKey; +import org.opensearch.migrations.tracing.InstrumentationTest; import org.opensearch.migrations.tracing.TestContext; import java.time.Duration; @@ -17,10 +18,15 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class TracingTest { +public class TracingTest extends InstrumentationTest { + + @Override + protected TestContext makeInstrumentationContext() { + return TestContext.withAllTracking(); + } + @Test public void tracingWorks() { - TestContext rootContext = TestContext.withAllTracking(); var tssk = new ISourceTrafficChannelKey.PojoImpl("n", "c"); try (var channelCtx = rootContext.createChannelContext(tssk); var kafkaRecordCtx = diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java index 0a4b78d52..1c5927e46 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/tracing/TestContext.java @@ -55,6 +55,7 @@ public IReplayContexts.ITrafficStreamsLifecycleContext createTrafficStreamContex @Override public void close() { + contextTracker.close(); inMemoryInstrumentationBundle.close(); // Assertions.assertEquals("", contextTracker.getAllRemainingActiveScopes().entrySet().stream() // .map(kvp->kvp.getKey().toString()).collect(Collectors.joining())); From dc762394632b510ef68a157f60e490bbb9bed8e0 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 8 Feb 2024 23:32:04 -0500 Subject: [PATCH 93/94] Test fixes + make scheduled contexts use System.nanotime instead of Instants, even if an Instant is what's passed in to the constructor Signed-off-by: Greg Schohn --- .../migrations/tracing/BaseNestedSpanContext.java | 5 +++-- .../migrations/tracing/BaseSpanContext.java | 1 + .../migrations/tracing/ContextTracker.java | 8 ++++++++ .../migrations/replay/tracing/ReplayContexts.java | 12 +++++++----- .../datahandlers/NettyPacketToHttpConsumerTest.java | 7 +++++++ .../migrations/replay/tracing/TracingTest.java | 2 +- 6 files changed, 27 insertions(+), 8 deletions(-) diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java index 4765cc6f1..a7f35dc36 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java +++ b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseNestedSpanContext.java @@ -11,12 +11,13 @@ import java.util.stream.Stream; public abstract class BaseNestedSpanContext - extends BaseSpanContext { + + extends BaseSpanContext +{ final T enclosingScope; protected BaseNestedSpanContext(S rootScope, T enclosingScope) { super(rootScope); - rootScope.onContextCreated(this); this.enclosingScope = enclosingScope; } diff --git a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java index 04b048030..5885cf774 100644 --- a/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java +++ 
b/TrafficCapture/coreUtilities/src/main/java/org/opensearch/migrations/tracing/BaseSpanContext.java @@ -23,6 +23,7 @@ public abstract class BaseSpanContext public BaseSpanContext(S rootScope) { this.startNanoTime = System.nanoTime(); this.rootInstrumentationScope = rootScope; + rootScope.onContextCreated(this); } protected static AttributesBuilder addAttributeIfPresent(AttributesBuilder attributesBuilder, diff --git a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java index ddb700892..a3214ce3c 100644 --- a/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java +++ b/TrafficCapture/coreUtilities/src/testFixtures/java/org/opensearch/migrations/tracing/ContextTracker.java @@ -25,9 +25,13 @@ public CallDetails() { private final Map scopedContextToCallDetails = new WeakHashMap<>(); private final Object lockObject = new Object(); + private boolean isClosed; public void onCreated(IScopedInstrumentationAttributes ctx) { synchronized (lockObject) { + if (isClosed) { + return; + } var oldItem = scopedContextToCallDetails.putIfAbsent(ctx, new CallDetails()); assert oldItem == null; } @@ -35,6 +39,9 @@ public void onCreated(IScopedInstrumentationAttributes ctx) { public void onClosed(IScopedInstrumentationAttributes ctx) { synchronized (lockObject) { + if (isClosed) { + return; + } var newExceptionStack = new ExceptionForStackTracingOnly(); var oldCallDetails = scopedContextToCallDetails.get(ctx); assert oldCallDetails != null; @@ -61,6 +68,7 @@ public Map getAllRemainingActiveS public void close() { synchronized (lockObject) { scopedContextToCallDetails.clear(); + isClosed = true; } } } diff --git a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java index 94bee2e3a..20066be81 100644 --- a/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java +++ b/TrafficCapture/trafficReplayer/src/main/java/org/opensearch/migrations/replay/tracing/ReplayContexts.java @@ -299,7 +299,8 @@ public TargetRequestContext createTargetRequestContext() { @Override public IReplayContexts.IScheduledContext createScheduledContext(Instant timestamp) { - return new ReplayContexts.ScheduledContext(this, timestamp); + return new ReplayContexts.ScheduledContext(this, + Duration.between(Instant.now(), timestamp).toNanos()); } @Override @@ -516,11 +517,11 @@ public static class ScheduledContext extends DirectNestedSpanContext implements IReplayContexts.IScheduledContext { - private final Instant scheduledFor; + private final long scheduledForNanoTime; - public ScheduledContext(HttpTransactionContext enclosingScope, Instant scheduledFor) { + public ScheduledContext(HttpTransactionContext enclosingScope, long scheduledForNanoTime) { super(enclosingScope); - this.scheduledFor = scheduledFor; + this.scheduledForNanoTime = scheduledForNanoTime; initializeSpan(); } @@ -544,7 +545,8 @@ private MetricInstruments(Meter meter, String activityName) { @Override public void sendMeterEventsForEnd() { super.sendMeterEventsForEnd(); - meterHistogramMillis(getMetrics().lag, Duration.between(scheduledFor, Instant.now())); + meterHistogramMillis(getMetrics().lag, + Duration.ofNanos(Math.max(0, System.nanoTime() - 
scheduledForNanoTime))); } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index a949db7db..c79986c89 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -184,6 +184,13 @@ public void testThatConnectionsAreKeptAliveAndShared(boolean useTls) var stopFuture = sendingFactory.closeConnectionsAndShutdown(); log.info("waiting for factory to shutdown: " + stopFuture); stopFuture.get(); + } + + @ParameterizedTest + @ValueSource(booleans = {false, true}) + @WrapWithNettyLeakDetection(repetitions = 1) + public void testMetricCountsFor_testThatConnectionsAreKeptAliveAndShared(boolean useTls) throws Exception { + testThatConnectionsAreKeptAliveAndShared(useTls); Thread.sleep(200); // let metrics settle down var allMetricData = rootContext.inMemoryInstrumentationBundle.testMetricExporter.getFinishedMetricItems(); long tcpOpenConnectionCount = allMetricData.stream().filter(md->md.getName().startsWith("tcpConnectionCount")) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java index 17e0300d4..d6cbd2fc2 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/tracing/TracingTest.java @@ -41,7 +41,7 @@ public void tracingWorks() { } try (var ctx = httpCtx.createTransformationContext()) { } - try (var ctx = httpCtx.createScheduledContext(Instant.now().plus(Duration.ofSeconds(1)))) { + try (var ctx = httpCtx.createScheduledContext(Instant.now())) { } try (var targetRequestCtx = httpCtx.createTargetRequestContext()) { try (var ctx = targetRequestCtx.createHttpSendingContext()) { From 8a875b39e08aaab5d009391363f052efb31759b1 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 9 Feb 2024 00:16:34 -0500 Subject: [PATCH 94/94] Set the x-ray exporter attribute index_all_attributes=true so that attributes end up as annotations instead of metadata so that they can be searched. To search in x-ray, use `annotationId.ATTRIBUTE_NAME="..."`. 
Signed-off-by: Greg Schohn --- .../dockerSolution/otelConfigs/configSnippets/awsXRay.yaml | 1 + .../composeExtensions/configs/otel-config-everything.yaml | 1 + .../src/main/docker/composeExtensions/otel-everything.yml | 4 ++-- .../src/main/docker/otelCollector/otel-config-aws-debug.yaml | 1 + .../src/main/docker/otelCollector/otel-config-aws.yaml | 1 + 5 files changed, 6 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml index a0fb4a1c2..37d4a0474 100644 --- a/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml +++ b/TrafficCapture/dockerSolution/otelConfigs/configSnippets/awsXRay.yaml @@ -1,5 +1,6 @@ exporters: awsxray: + index_all_attributes: true service: pipelines: diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml index f0ee7d695..f9ba3eedf 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/configs/otel-config-everything.yaml @@ -61,6 +61,7 @@ exporters: awsemf: namespace: 'TrafficCaptureReplay' awsxray: + index_all_attributes: true prometheus: endpoint: "0.0.0.0:8889" send_timestamps: true diff --git a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml index 2ee2b15f7..6c3cbe32e 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml +++ b/TrafficCapture/dockerSolution/src/main/docker/composeExtensions/otel-everything.yml @@ -18,6 +18,6 @@ services: depends_on: - jaeger environment: - - AWS_REGION=us-east-1 - - AWS_DEFAULT_REGION=us-east-1 + - AWS_REGION=us-east-2 + - AWS_DEFAULT_REGION=us-east-2 - AWS_PROFILE=default \ No newline at end of file diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml index 30ff91294..b400f2393 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws-debug.yaml @@ -49,6 +49,7 @@ exporters: awsemf: namespace: 'TrafficCaptureReplay' awsxray: + index_all_attributes: true service: extensions: [health_check] pipelines: diff --git a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml index 3d1cf1a51..aad185e1a 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml +++ b/TrafficCapture/dockerSolution/src/main/docker/otelCollector/otel-config-aws.yaml @@ -45,6 +45,7 @@ exporters: awsemf: namespace: 'TrafficCaptureReplay' awsxray: + index_all_attributes: true service: extensions: [health_check] pipelines:
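For reference, the System.nanoTime-based lag measurement that [PATCH 93/94] moves the scheduled contexts toward can be sketched independently of the ReplayContexts classes. This is a minimal illustration only, assuming nothing beyond java.time and System.nanoTime(); the ScheduledLag class and its method names are hypothetical and do not appear in the patch itself.

import java.time.Duration;
import java.time.Instant;

// Sketch: convert the wall-clock "scheduled for" Instant into a monotonic
// nanoTime deadline once, at construction, then measure lag against the
// monotonic clock when the scheduled work actually completes.
class ScheduledLag {
    private final long scheduledForNanoTime;

    ScheduledLag(Instant scheduledFor) {
        // Positive offset when the schedule is in the future, negative when it is already past.
        this.scheduledForNanoTime =
                System.nanoTime() + Duration.between(Instant.now(), scheduledFor).toNanos();
    }

    Duration lag() {
        // Clamp to zero, as the patch does, so finishing ahead of schedule reports no lag.
        return Duration.ofNanos(Math.max(0, System.nanoTime() - scheduledForNanoTime));
    }
}

Measuring against System.nanoTime() rather than comparing Instants keeps the lag histogram immune to wall-clock adjustments between scheduling and completion, which appears to be the motivation for the change even though callers still hand in an Instant.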