From 955e86f0bf7d5af889fdf853ff21a984fdb9edfa Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 16 May 2018 14:57:55 -0600 Subject: [PATCH 01/11] Require pipelining --- .../http/netty4/Netty4HttpChannel.java | 40 +++----- .../http/netty4/Netty4HttpRequestHandler.java | 32 +++---- .../netty4/Netty4HttpServerTransport.java | 12 +-- .../pipelining/HttpPipelinedRequest.java | 88 ----------------- .../pipelining/HttpPipelinedResponse.java | 94 ------------------ .../pipelining/HttpPipeliningHandler.java | 96 ++++++------------- .../http/netty4/Netty4HttpChannelTests.java | 23 ++--- .../Netty4HttpServerPipeliningTests.java | 82 +++------------- .../netty4/Netty4PipeliningDisabledIT.java | 80 ---------------- ...EnabledIT.java => Netty4PipeliningIT.java} | 2 +- .../Netty4HttpPipeliningHandlerTests.java | 27 +++--- .../common/settings/ClusterSettings.java | 1 - .../http/HttpPipelinedRequest.java | 38 ++++++++ .../http/HttpPipelinedResponse.java | 49 ++++++++++ .../http/HttpPipeliningAggregator.java | 78 +++++++++++++++ .../http/HttpTransportSettings.java | 2 - 16 files changed, 263 insertions(+), 481 deletions(-) delete mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java delete mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java delete mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java rename modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/{Netty4PipeliningEnabledIT.java => Netty4PipeliningIT.java} (97%) create mode 100644 server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java create mode 100644 server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java create mode 100644 server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index 6e39a7f50d2cd..e5c135b6e5aeb 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -41,8 +41,8 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpHandlingSettings; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; -import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; @@ -59,29 +59,24 @@ final class Netty4HttpChannel extends AbstractRestChannel { private final Netty4HttpServerTransport transport; private final Channel channel; private final FullHttpRequest nettyRequest; - private final HttpPipelinedRequest pipelinedRequest; + private final int sequence; private final ThreadContext threadContext; private final HttpHandlingSettings handlingSettings; /** - * @param transport The corresponding NettyHttpServerTransport where this channel belongs to. - * @param request The request that is handled by this channel. - * @param pipelinedRequest If HTTP pipelining is enabled provide the corresponding pipelined request. May be null if - * HTTP pipelining is disabled. 
- * @param handlingSettings true iff error messages should include stack traces. - * @param threadContext the thread context for the channel + * @param transport The corresponding NettyHttpServerTransport where this channel belongs to. + * @param request The request that is handled by this channel. + * @param sequence The pipelining sequence number for this request + * @param handlingSettings true if error messages should include stack traces. + * @param threadContext the thread context for the channel */ - Netty4HttpChannel( - final Netty4HttpServerTransport transport, - final Netty4HttpRequest request, - final HttpPipelinedRequest pipelinedRequest, - final HttpHandlingSettings handlingSettings, - final ThreadContext threadContext) { + Netty4HttpChannel(Netty4HttpServerTransport transport, Netty4HttpRequest request, int sequence, HttpHandlingSettings handlingSettings, + ThreadContext threadContext) { super(request, handlingSettings.getDetailedErrorsEnabled()); this.transport = transport; this.channel = request.getChannel(); this.nettyRequest = request.request(); - this.pipelinedRequest = pipelinedRequest; + this.sequence = sequence; this.threadContext = threadContext; this.handlingSettings = handlingSettings; } @@ -129,7 +124,7 @@ public void sendResponse(RestResponse response) { final ChannelPromise promise = channel.newPromise(); if (releaseContent) { - promise.addListener(f -> ((Releasable)content).close()); + promise.addListener(f -> ((Releasable) content).close()); } if (releaseBytesStreamOutput) { @@ -140,13 +135,9 @@ public void sendResponse(RestResponse response) { promise.addListener(ChannelFutureListener.CLOSE); } - final Object msg; - if (pipelinedRequest != null) { - msg = pipelinedRequest.createHttpResponse(resp, promise); - } else { - msg = resp; - } - channel.writeAndFlush(msg, promise); + HttpPipelinedResponse newResponse = new HttpPipelinedResponse<>(sequence, resp, promise); + + channel.writeAndFlush(newResponse, promise); releaseContent = false; releaseBytesStreamOutput = false; } finally { @@ -156,9 +147,6 @@ public void sendResponse(RestResponse response) { if (releaseBytesStreamOutput) { bytesOutputOrNull().close(); } - if (pipelinedRequest != null) { - pipelinedRequest.release(); - } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java index 74429c8dda9b7..643ce3c0be67f 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java @@ -30,41 +30,30 @@ import io.netty.handler.codec.http.HttpHeaders; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.netty4.Netty4Utils; import java.util.Collections; @ChannelHandler.Sharable -class Netty4HttpRequestHandler extends SimpleChannelInboundHandler { +class Netty4HttpRequestHandler extends SimpleChannelInboundHandler> { private final Netty4HttpServerTransport serverTransport; private final HttpHandlingSettings handlingSettings; - private final boolean httpPipeliningEnabled; private final ThreadContext threadContext; 
Netty4HttpRequestHandler(Netty4HttpServerTransport serverTransport, HttpHandlingSettings handlingSettings, ThreadContext threadContext) { this.serverTransport = serverTransport; - this.httpPipeliningEnabled = serverTransport.pipelining; this.handlingSettings = handlingSettings; this.threadContext = threadContext; } @Override - protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception { - final FullHttpRequest request; - final HttpPipelinedRequest pipelinedRequest; - if (this.httpPipeliningEnabled && msg instanceof HttpPipelinedRequest) { - pipelinedRequest = (HttpPipelinedRequest) msg; - request = (FullHttpRequest) pipelinedRequest.last(); - } else { - pipelinedRequest = null; - request = (FullHttpRequest) msg; - } + protected void channelRead0(ChannelHandlerContext ctx, HttpPipelinedRequest msg) throws Exception { + final FullHttpRequest request = msg.getRequest(); - boolean success = false; try { final FullHttpRequest copy = @@ -111,7 +100,7 @@ protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Except Netty4HttpChannel innerChannel; try { innerChannel = - new Netty4HttpChannel(serverTransport, httpRequest, pipelinedRequest, handlingSettings, threadContext); + new Netty4HttpChannel(serverTransport, httpRequest, msg.getSequence(), handlingSettings, threadContext); } catch (final IllegalArgumentException e) { if (badRequestCause == null) { badRequestCause = e; @@ -126,7 +115,7 @@ protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Except copy, ctx.channel()); innerChannel = - new Netty4HttpChannel(serverTransport, innerRequest, pipelinedRequest, handlingSettings, threadContext); + new Netty4HttpChannel(serverTransport, innerRequest, msg.getSequence(), handlingSettings, threadContext); } channel = innerChannel; } @@ -138,12 +127,13 @@ protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Except } else { serverTransport.dispatchRequest(httpRequest, channel); } - success = true; } finally { + // As we have copied the buffer, we can release the request + request.release(); // the request is otherwise released in case of dispatch - if (success == false && pipelinedRequest != null) { - pipelinedRequest.release(); - } +// if (success == false && msg != null) { +// msg.release(); +// } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 8e5bace46aa7e..bf8879b6064b5 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -99,7 +99,6 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_REUSE_ADDRESS; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_SEND_BUFFER_SIZE; -import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING; import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS; import static org.elasticsearch.http.netty4.cors.Netty4CorsHandler.ANY_ORIGIN; @@ -162,8 +161,6 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { protected final int workerCount; - protected final boolean pipelining; - protected final int pipeliningMaxEvents; /** @@ -226,14 
+223,13 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic ByteSizeValue receivePredictor = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings); recvByteBufAllocator = new FixedRecvByteBufAllocator(receivePredictor.bytesAsInt()); - this.pipelining = SETTING_PIPELINING.get(settings); this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); this.corsConfig = buildCorsConfig(settings); logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " + - "receive_predictor[{}], max_composite_buffer_components[{}], pipelining[{}], pipelining_max_events[{}]", + "receive_predictor[{}], max_composite_buffer_components[{}], pipelining_max_events[{}]", maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength, receivePredictor, maxCompositeBufferComponents, - pipelining, pipeliningMaxEvents); + pipeliningMaxEvents); } public Settings settings() { @@ -452,9 +448,7 @@ protected void initChannel(Channel ch) throws Exception { if (SETTING_CORS_ENABLED.get(transport.settings())) { ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.getCorsConfig())); } - if (transport.pipelining) { - ch.pipeline().addLast("pipelining", new HttpPipeliningHandler(transport.logger, transport.pipeliningMaxEvents)); - } + ch.pipeline().addLast("pipelining", new HttpPipeliningHandler(transport.logger, transport.pipeliningMaxEvents)); ch.pipeline().addLast("handler", requestHandler); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java deleted file mode 100644 index be1669c60c297..0000000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedRequest.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.http.netty4.pipelining; - -import io.netty.channel.ChannelPromise; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.handler.codec.http.LastHttpContent; -import io.netty.util.ReferenceCounted; - -/** - * Permits downstream channel events to be ordered and signalled as to whether more are to come for - * a given sequence. 
- */ -public class HttpPipelinedRequest implements ReferenceCounted { - - private final LastHttpContent last; - private final int sequence; - - public HttpPipelinedRequest(final LastHttpContent last, final int sequence) { - this.last = last; - this.sequence = sequence; - } - - public LastHttpContent last() { - return last; - } - - public HttpPipelinedResponse createHttpResponse(final FullHttpResponse response, final ChannelPromise promise) { - return new HttpPipelinedResponse(response, promise, sequence); - } - - @Override - public int refCnt() { - return last.refCnt(); - } - - @Override - public ReferenceCounted retain() { - last.retain(); - return this; - } - - @Override - public ReferenceCounted retain(int increment) { - last.retain(increment); - return this; - } - - @Override - public ReferenceCounted touch() { - last.touch(); - return this; - } - - @Override - public ReferenceCounted touch(Object hint) { - last.touch(hint); - return this; - } - - @Override - public boolean release() { - return last.release(); - } - - @Override - public boolean release(int decrement) { - return last.release(decrement); - } - -} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java deleted file mode 100644 index 6b6db94d69a59..0000000000000 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipelinedResponse.java +++ /dev/null @@ -1,94 +0,0 @@ -package org.elasticsearch.http.netty4.pipelining; - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import io.netty.channel.ChannelPromise; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.util.ReferenceCounted; - -class HttpPipelinedResponse implements Comparable, ReferenceCounted { - - private final FullHttpResponse response; - private final ChannelPromise promise; - private final int sequence; - - HttpPipelinedResponse(FullHttpResponse response, ChannelPromise promise, int sequence) { - this.response = response; - this.promise = promise; - this.sequence = sequence; - } - - public FullHttpResponse response() { - return response; - } - - public ChannelPromise promise() { - return promise; - } - - public int sequence() { - return sequence; - } - - @Override - public int compareTo(HttpPipelinedResponse o) { - return Integer.compare(sequence, o.sequence); - } - - @Override - public int refCnt() { - return response.refCnt(); - } - - @Override - public ReferenceCounted retain() { - response.retain(); - return this; - } - - @Override - public ReferenceCounted retain(int increment) { - response.retain(increment); - return this; - } - - @Override - public ReferenceCounted touch() { - response.touch(); - return this; - } - - @Override - public ReferenceCounted touch(Object hint) { - response.touch(hint); - return this; - } - - @Override - public boolean release() { - return response.release(); - } - - @Override - public boolean release(int decrement) { - return response.release(decrement); - } - -} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java index a90027c81482b..20e6699d612c2 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java @@ -22,50 +22,44 @@ import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.LastHttpContent; import org.apache.logging.log4j.Logger; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedResponse; +import org.elasticsearch.http.HttpPipeliningAggregator; import org.elasticsearch.transport.netty4.Netty4Utils; import java.nio.channels.ClosedChannelException; +import java.util.ArrayList; import java.util.Collections; -import java.util.PriorityQueue; +import java.util.List; /** * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their corresponding requests. */ public class HttpPipeliningHandler extends ChannelDuplexHandler { - // we use a priority queue so that responses are ordered by their sequence number - private final PriorityQueue holdingQueue; - private final Logger logger; - private final int maxEventsHeld; - - /* - * The current read and write sequence numbers. Read sequence numbers are attached to requests in the order they are read from the - * channel, and then transferred to responses. A response is not written to the channel context until its sequence number matches the - * current write sequence, implying that all preceding messages have been written. 
- */ - private int readSequence; - private int writeSequence; + private final HttpPipeliningAggregator aggregator; /** * Construct a new pipelining handler; this handler should be used downstream of HTTP decoding/aggregation. * - * @param logger for logging unexpected errors + * @param logger for logging unexpected errors * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel connection; this is * required as events cannot queue up indefinitely */ public HttpPipeliningHandler(Logger logger, final int maxEventsHeld) { this.logger = logger; - this.maxEventsHeld = maxEventsHeld; - this.holdingQueue = new PriorityQueue<>(1); + this.aggregator = new HttpPipeliningAggregator<>(maxEventsHeld); } @Override public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception { if (msg instanceof LastHttpContent) { - ctx.fireChannelRead(new HttpPipelinedRequest(((LastHttpContent) msg).retain(), readSequence++)); + HttpPipelinedRequest pipelinedRequest = aggregator.read(((LastHttpContent) msg).retain()); + ctx.fireChannelRead(pipelinedRequest); } else { ctx.fireChannelRead(msg); } @@ -74,67 +68,39 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw @Override public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { if (msg instanceof HttpPipelinedResponse) { - final HttpPipelinedResponse current = (HttpPipelinedResponse) msg; - /* - * We attach the promise to the response. When we invoke a write on the channel with the response, we must ensure that we invoke - * the write methods that accept the same promise that we have attached to the response otherwise as the response proceeds - * through the handler pipeline a different promise will be used until reaching this handler. Therefore, we assert here that the - * attached promise is identical to the provided promise as a safety mechanism that we are respecting this. - */ - assert current.promise() == promise; - - boolean channelShouldClose = false; - - synchronized (holdingQueue) { - if (holdingQueue.size() < maxEventsHeld) { - holdingQueue.add(current); - - while (!holdingQueue.isEmpty()) { - /* - * Since the response with the lowest sequence number is the top of the priority queue, we know if its sequence - * number does not match the current write sequence number then we have not processed all preceding responses yet. - */ - final HttpPipelinedResponse top = holdingQueue.peek(); - if (top.sequence() != writeSequence) { - break; - } - holdingQueue.remove(); - /* - * We must use the promise attached to the response; this is necessary since are going to hold a response until all - * responses that precede it in the pipeline are written first. Note that the promise from the method invocation is - * not ignored, it will already be attached to an existing response and consumed when that response is drained. 
- */ - ctx.write(top.response(), top.promise()); - writeSequence++; - } - } else { - channelShouldClose = true; - } - } - - if (channelShouldClose) { + synchronized (aggregator) { + HttpPipelinedResponse response = (HttpPipelinedResponse) msg; + boolean success = false; try { + ArrayList> responsesToWrite = aggregator.write(response); + success = true; + for (HttpPipelinedResponse responseToWrite : responsesToWrite) { + ctx.write(responseToWrite.getResponse(), responseToWrite.getListener()); + } + } catch (IllegalStateException e) { Netty4Utils.closeChannels(Collections.singletonList(ctx.channel())); } finally { - current.release(); - promise.setSuccess(); + if (success == false) { + response.getListener().setFailure(new ClosedChannelException()); + } } } + } else { ctx.write(msg, promise); } } @Override - public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception { - if (holdingQueue.isEmpty() == false) { + public void close(ChannelHandlerContext ctx, ChannelPromise promise) { + List> inflightResponses = aggregator.removeAllInflightResponses(); + + if (inflightResponses.isEmpty() == false) { ClosedChannelException closedChannelException = new ClosedChannelException(); - HttpPipelinedResponse pipelinedResponse; - while ((pipelinedResponse = holdingQueue.poll()) != null) { + for (HttpPipelinedResponse inflightResponse : inflightResponses) { try { - pipelinedResponse.release(); - pipelinedResponse.promise().setFailure(closedChannelException); - } catch (Exception e) { + inflightResponse.getListener().setFailure(closedChannelException); + } catch (RuntimeException e) { logger.error("unexpected error while releasing pipelined http responses", e); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java index 0ef1ea585b11c..d6ce4a03f0579 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ -57,10 +57,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.http.HttpHandlingSettings; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; -import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestResponse; @@ -203,6 +204,7 @@ public void testThatAnyOriginWorks() { assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), nullValue()); } + @SuppressWarnings("unchecked") public void testHeadersSet() { Settings settings = Settings.builder().build(); try (Netty4HttpServerTransport httpServerTransport = @@ -212,12 +214,12 @@ public void testHeadersSet() { final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); httpRequest.headers().add(HttpHeaderNames.ORIGIN, "remote"); final WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); - Netty4HttpRequest request = new Netty4HttpRequest(xContentRegistry(), 
httpRequest, writeCapturingChannel); + final Netty4HttpRequest request = new Netty4HttpRequest(xContentRegistry(), httpRequest, writeCapturingChannel); HttpHandlingSettings handlingSettings = httpServerTransport.httpHandlingSettings; // send a response Netty4HttpChannel channel = - new Netty4HttpChannel(httpServerTransport, request, null, handlingSettings, threadPool.getThreadContext()); + new Netty4HttpChannel(httpServerTransport, request, 1, handlingSettings, threadPool.getThreadContext()); TestResponse resp = new TestResponse(); final String customHeader = "custom-header"; final String customHeaderValue = "xyz"; @@ -227,7 +229,7 @@ public void testHeadersSet() { // inspect what was written List writtenObjects = writeCapturingChannel.getWrittenObjects(); assertThat(writtenObjects.size(), is(1)); - HttpResponse response = (HttpResponse) writtenObjects.get(0); + HttpResponse response = ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); assertThat(response.headers().get("non-existent-header"), nullValue()); assertThat(response.headers().get(customHeader), equalTo(customHeaderValue)); assertThat(response.headers().get(HttpHeaderNames.CONTENT_LENGTH), equalTo(Integer.toString(resp.content().length()))); @@ -243,10 +245,9 @@ public void testReleaseOnSendToClosedChannel() { final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); final EmbeddedChannel embeddedChannel = new EmbeddedChannel(); final Netty4HttpRequest request = new Netty4HttpRequest(registry, httpRequest, embeddedChannel); - final HttpPipelinedRequest pipelinedRequest = randomBoolean() ? new HttpPipelinedRequest(request.request(), 1) : null; HttpHandlingSettings handlingSettings = httpServerTransport.httpHandlingSettings; final Netty4HttpChannel channel = - new Netty4HttpChannel(httpServerTransport, request, pipelinedRequest, handlingSettings, threadPool.getThreadContext()); + new Netty4HttpChannel(httpServerTransport, request, 1, handlingSettings, threadPool.getThreadContext()); final TestResponse response = new TestResponse(bigArrays); assertThat(response.content(), instanceOf(Releasable.class)); embeddedChannel.close(); @@ -263,10 +264,9 @@ public void testReleaseOnSendToChannelAfterException() throws IOException { final FullHttpRequest httpRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"); final EmbeddedChannel embeddedChannel = new EmbeddedChannel(); final Netty4HttpRequest request = new Netty4HttpRequest(registry, httpRequest, embeddedChannel); - final HttpPipelinedRequest pipelinedRequest = randomBoolean() ? 
new HttpPipelinedRequest(request.request(), 1) : null; HttpHandlingSettings handlingSettings = httpServerTransport.httpHandlingSettings; final Netty4HttpChannel channel = - new Netty4HttpChannel(httpServerTransport, request, pipelinedRequest, handlingSettings, threadPool.getThreadContext()); + new Netty4HttpChannel(httpServerTransport, request, 1, handlingSettings, threadPool.getThreadContext()); final BytesRestResponse response = new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, JsonXContent.contentBuilder().startObject().endObject()); assertThat(response.content(), not(instanceOf(Releasable.class))); @@ -312,7 +312,7 @@ public void testConnectionClose() throws Exception { assertTrue(embeddedChannel.isOpen()); HttpHandlingSettings handlingSettings = httpServerTransport.httpHandlingSettings; final Netty4HttpChannel channel = - new Netty4HttpChannel(httpServerTransport, request, null, handlingSettings, threadPool.getThreadContext()); + new Netty4HttpChannel(httpServerTransport, request, 1, handlingSettings, threadPool.getThreadContext()); final TestResponse resp = new TestResponse(); channel.sendResponse(resp); assertThat(embeddedChannel.isOpen(), equalTo(!close)); @@ -323,6 +323,7 @@ private FullHttpResponse executeRequest(final Settings settings, final String ho return executeRequest(settings, null, host); } + @SuppressWarnings("unchecked") private FullHttpResponse executeRequest(final Settings settings, final String originValue, final String host) { // construct request and send it over the transport layer try (Netty4HttpServerTransport httpServerTransport = @@ -340,13 +341,13 @@ private FullHttpResponse executeRequest(final Settings settings, final String or HttpHandlingSettings handlingSettings = httpServerTransport.httpHandlingSettings; Netty4HttpChannel channel = - new Netty4HttpChannel(httpServerTransport, request, null, handlingSettings, threadPool.getThreadContext()); + new Netty4HttpChannel(httpServerTransport, request, 1, handlingSettings, threadPool.getThreadContext()); channel.sendResponse(new TestResponse()); // get the response List writtenObjects = writeCapturingChannel.getWrittenObjects(); assertThat(writtenObjects.size(), is(1)); - return (FullHttpResponse) writtenObjects.get(0); + return ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 0eb14a8a76e9b..e51ff9ed5ae8b 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -38,9 +38,10 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.NullDispatcher; -import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -52,16 +53,11 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; 
-import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; -import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.hasSize; /** * This test just tests, if he pipelining works in general with out any connection the Elasticsearch handler @@ -85,7 +81,7 @@ public void shutdown() throws Exception { } } - public void testThatHttpPipeliningWorksWhenEnabled() throws Exception { + public void testThatHttpPipeliningWorks() throws Exception { final Settings settings = Settings.builder() .put("http.pipelining", true) .put("http.port", "0") @@ -112,48 +108,6 @@ public void testThatHttpPipeliningWorksWhenEnabled() throws Exception { } } - public void testThatHttpPipeliningCanBeDisabled() throws Exception { - final Settings settings = Settings.builder() - .put("http.pipelining", false) - .put("http.port", "0") - .build(); - try (HttpServerTransport httpServerTransport = new CustomNettyHttpServerTransport(settings)) { - httpServerTransport.start(); - final TransportAddress transportAddress = randomFrom(httpServerTransport.boundAddress().boundAddresses()); - - final int numberOfRequests = randomIntBetween(4, 16); - final Set slowIds = new HashSet<>(); - final List requests = new ArrayList<>(numberOfRequests); - for (int i = 0; i < numberOfRequests; i++) { - if (rarely()) { - requests.add("/slow/" + i); - slowIds.add(i); - } else { - requests.add("/" + i); - } - } - - try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { - Collection responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{})); - List responseBodies = new ArrayList<>(Netty4HttpClient.returnHttpResponseBodies(responses)); - // we can not be sure about the order of the responses, but the slow ones should come last - assertThat(responseBodies, hasSize(numberOfRequests)); - for (int i = 0; i < numberOfRequests - slowIds.size(); i++) { - assertThat(responseBodies.get(i), matches("/\\d+")); - } - - final Set ids = new HashSet<>(); - for (int i = 0; i < slowIds.size(); i++) { - final String response = responseBodies.get(numberOfRequests - slowIds.size() + i); - assertThat(response, matches("/slow/\\d+" )); - assertTrue(ids.add(Integer.parseInt(response.split("/")[2]))); - } - - assertThat(slowIds, equalTo(ids)); - } - } - } - class CustomNettyHttpServerTransport extends Netty4HttpServerTransport { private final ExecutorService executorService = Executors.newCachedThreadPool(); @@ -196,7 +150,7 @@ protected void initChannel(Channel ch) throws Exception { } - class PossiblySlowUpstreamHandler extends SimpleChannelInboundHandler { + class PossiblySlowUpstreamHandler extends SimpleChannelInboundHandler> { private final ExecutorService executorService; @@ -205,7 +159,7 @@ class PossiblySlowUpstreamHandler extends SimpleChannelInboundHandler { } @Override - protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception { + protected void channelRead0(ChannelHandlerContext ctx, HttpPipelinedRequest msg) throws Exception { executorService.submit(new PossiblySlowRunnable(ctx, msg)); } @@ -220,26 +174,18 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws E class PossiblySlowRunnable implements Runnable { private ChannelHandlerContext ctx; - private HttpPipelinedRequest pipelinedRequest; + private HttpPipelinedRequest 
pipelinedRequest; private FullHttpRequest fullHttpRequest; - PossiblySlowRunnable(ChannelHandlerContext ctx, Object msg) { + PossiblySlowRunnable(ChannelHandlerContext ctx, HttpPipelinedRequest msg) { this.ctx = ctx; - if (msg instanceof HttpPipelinedRequest) { - this.pipelinedRequest = (HttpPipelinedRequest) msg; - } else if (msg instanceof FullHttpRequest) { - this.fullHttpRequest = (FullHttpRequest) msg; - } + this.pipelinedRequest = msg; + this.fullHttpRequest = pipelinedRequest.getRequest(); } @Override public void run() { - final String uri; - if (pipelinedRequest != null && pipelinedRequest.last() instanceof FullHttpRequest) { - uri = ((FullHttpRequest) pipelinedRequest.last()).uri(); - } else { - uri = fullHttpRequest.uri(); - } + final String uri = fullHttpRequest.uri(); final ByteBuf buffer = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8); @@ -258,13 +204,7 @@ public void run() { } final ChannelPromise promise = ctx.newPromise(); - final Object msg; - if (pipelinedRequest != null) { - msg = pipelinedRequest.createHttpResponse(httpResponse, promise); - } else { - msg = httpResponse; - } - ctx.writeAndFlush(msg, promise); + ctx.writeAndFlush(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse, promise), promise); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java deleted file mode 100644 index af0e7c85a8f63..0000000000000 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningDisabledIT.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.http.netty4; - -import io.netty.handler.codec.http.FullHttpResponse; -import org.elasticsearch.ESNetty4IntegTestCase; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Locale; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.hasSize; - -@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class Netty4PipeliningDisabledIT extends ESNetty4IntegTestCase { - - @Override - protected boolean addMockHttpTransport() { - return false; // enable http - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("http.pipelining", false) - .build(); - } - - public void testThatNettyHttpServerDoesNotSupportPipelining() throws Exception { - ensureGreen(); - String[] requests = new String[] {"/", "/_nodes/stats", "/", "/_cluster/state", "/", "/_nodes", "/"}; - - HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); - TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); - TransportAddress transportAddress = (TransportAddress) randomFrom(boundAddresses); - - try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { - Collection responses = nettyHttpClient.get(transportAddress.address(), requests); - assertThat(responses, hasSize(requests.length)); - - List opaqueIds = new ArrayList<>(Netty4HttpClient.returnOpaqueIds(responses)); - - assertResponsesOutOfOrder(opaqueIds); - } - } - - /** - * checks if all responses are there, but also tests that they are out of order because pipelining is disabled - */ - private void assertResponsesOutOfOrder(List opaqueIds) { - String message = String.format(Locale.ROOT, "Expected returned http message ids to be in any order of: %s", opaqueIds); - assertThat(message, opaqueIds, containsInAnyOrder("0", "1", "2", "3", "4", "5", "6")); - } - -} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java similarity index 97% rename from modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java rename to modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java index 9723ee93faf59..858f42334226a 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningEnabledIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.is; @ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class Netty4PipeliningEnabledIT extends ESNetty4IntegTestCase { +public class Netty4PipeliningIT extends ESNetty4IntegTestCase { @Override protected boolean addMockHttpTransport() { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java 
b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java index ffb6c8fb3569d..d3b59c35c2163 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java @@ -37,6 +37,8 @@ import io.netty.handler.codec.http.LastHttpContent; import io.netty.handler.codec.http.QueryStringDecoder; import org.elasticsearch.common.Randomness; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -184,7 +186,7 @@ public void testThatPipeliningClosesConnectionWithTooManyEvents() throws Interru } final List latches = new ArrayList<>(); - final List requests = IntStream.range(1, numberOfRequests + 1).mapToObj(r -> r).collect(Collectors.toList()); + final List requests = IntStream.range(1, numberOfRequests + 1).boxed().collect(Collectors.toList()); Randomness.shuffle(requests); for (final Integer request : requests) { @@ -211,19 +213,20 @@ public void testPipeliningRequestsAreReleased() throws InterruptedException { embeddedChannel.writeInbound(createHttpRequest("/" + i)); } - HttpPipelinedRequest inbound; - ArrayList requests = new ArrayList<>(); + HttpPipelinedRequest inbound; + ArrayList> requests = new ArrayList<>(); while ((inbound = embeddedChannel.readInbound()) != null) { requests.add(inbound); } ArrayList promises = new ArrayList<>(); for (int i = 1; i < requests.size(); ++i) { - final DefaultFullHttpResponse httpResponse = new DefaultFullHttpResponse(HTTP_1_1, OK); + final FullHttpResponse httpResponse = new DefaultFullHttpResponse(HTTP_1_1, OK); ChannelPromise promise = embeddedChannel.newPromise(); promises.add(promise); - HttpPipelinedResponse response = requests.get(i).createHttpResponse(httpResponse, promise); - embeddedChannel.writeAndFlush(response, promise); + int sequence = requests.get(i).getSequence(); + HttpPipelinedResponse resp = new HttpPipelinedResponse<>(sequence, httpResponse, promise); + embeddedChannel.writeAndFlush(resp, promise); } for (ChannelPromise promise : promises) { @@ -260,14 +263,14 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpRequest request) thro } - private class WorkEmulatorHandler extends SimpleChannelInboundHandler { + private class WorkEmulatorHandler extends SimpleChannelInboundHandler> { @Override - protected void channelRead0(final ChannelHandlerContext ctx, final HttpPipelinedRequest pipelinedRequest) throws Exception { + protected void channelRead0(final ChannelHandlerContext ctx, HttpPipelinedRequest pipelinedRequest) { + LastHttpContent request = pipelinedRequest.getRequest(); final QueryStringDecoder decoder; - if (pipelinedRequest.last() instanceof FullHttpRequest) { - final FullHttpRequest fullHttpRequest = (FullHttpRequest) pipelinedRequest.last(); - decoder = new QueryStringDecoder(fullHttpRequest.uri()); + if (request instanceof FullHttpRequest) { + decoder = new QueryStringDecoder(((FullHttpRequest)request).uri()); } else { decoder = new QueryStringDecoder(AggregateUrisAndHeadersHandler.QUEUE_URI.poll()); } @@ -286,7 +289,7 @@ protected void channelRead0(final ChannelHandlerContext ctx, final HttpPipelined try { waitingLatch.await(1000, TimeUnit.SECONDS); final ChannelPromise promise = ctx.newPromise(); - ctx.write(pipelinedRequest.createHttpResponse(httpResponse, 
promise), promise); + ctx.write(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse, promise), promise); finishingLatch.countDown(); } catch (InterruptedException e) { fail(e.toString()); diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index c19cbe4687ce6..d9cf0f630c0f2 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -227,7 +227,6 @@ public void apply(Settings value, Settings current, Settings previous) { HttpTransportSettings.SETTING_CORS_ENABLED, HttpTransportSettings.SETTING_CORS_MAX_AGE, HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED, - HttpTransportSettings.SETTING_PIPELINING, HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN, HttpTransportSettings.SETTING_HTTP_HOST, HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST, diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java b/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java new file mode 100644 index 0000000000000..69deaaf072344 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.http; + +public class HttpPipelinedRequest { + + private final int sequence; + private final R request; + + public HttpPipelinedRequest(int sequence, R request) { + this.sequence = sequence; + this.request = request; + } + + public int getSequence() { + return sequence; + } + + public R getRequest() { + return request; + } +} diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java b/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java new file mode 100644 index 0000000000000..add5e3016b321 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.http; + +public class HttpPipelinedResponse implements Comparable> { + + private final int sequence; + private final Response response; + private final Listener listener; + + public HttpPipelinedResponse(int sequence, Response response, Listener listener) { + this.sequence = sequence; + this.response = response; + this.listener = listener; + } + + public int getSequence() { + return sequence; + } + + public Response getResponse() { + return response; + } + + public Listener getListener() { + return listener; + } + + @Override + public int compareTo(HttpPipelinedResponse o) { + return Integer.compare(sequence, o.sequence); + } +} diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java b/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java new file mode 100644 index 0000000000000..ff250d7435d79 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.http; + +import java.util.ArrayList; +import java.util.List; +import java.util.PriorityQueue; + +public class HttpPipeliningAggregator { + + private final int maxEventsHeld; + private final PriorityQueue> outboundHoldingQueue; + /* + * The current read and write sequence numbers. Read sequence numbers are attached to requests in the order they are read from the + * channel, and then transferred to responses. A response is not written to the channel context until its sequence number matches the + * current write sequence, implying that all preceding messages have been written. + */ + private int readSequence; + private int writeSequence; + + public HttpPipeliningAggregator(int maxEventsHeld) { + this.maxEventsHeld = maxEventsHeld; + this.outboundHoldingQueue = new PriorityQueue<>(1); + } + + public HttpPipelinedRequest read(final Request request) { + return new HttpPipelinedRequest<>(readSequence++, request); + } + + public ArrayList> write(final HttpPipelinedResponse response) { + if (outboundHoldingQueue.size() < maxEventsHeld) { + ArrayList> readyResponses = new ArrayList<>(); + outboundHoldingQueue.add(response); + while (!outboundHoldingQueue.isEmpty()) { + /* + * Since the response with the lowest sequence number is the top of the priority queue, we know if its sequence + * number does not match the current write sequence number then we have not processed all preceding responses yet. 
+ */ + final HttpPipelinedResponse top = outboundHoldingQueue.peek(); + + if (top.getSequence() != writeSequence) { + break; + } + outboundHoldingQueue.poll(); + readyResponses.add(top); + writeSequence++; + } + + return readyResponses; + } else { + int eventCount = outboundHoldingQueue.size() + 1; + throw new IllegalStateException("Too many pipelined events [" + eventCount + "]. Max events allowed [" + + maxEventsHeld + "]."); + } + } + + public List> removeAllInflightResponses() { + ArrayList> responses = new ArrayList<>(outboundHoldingQueue); + outboundHoldingQueue.clear(); + return responses; + } +} diff --git a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 98451e0c304b9..4670137d09a54 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -49,8 +49,6 @@ public final class HttpTransportSettings { new Setting<>("http.cors.allow-headers", "X-Requested-With,Content-Type,Content-Length", (value) -> value, Property.NodeScope); public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, Property.NodeScope); - public static final Setting SETTING_PIPELINING = - Setting.boolSetting("http.pipelining", true, Property.NodeScope); public static final Setting SETTING_PIPELINING_MAX_EVENTS = Setting.intSetting("http.pipelining.max_events", 10000, Property.NodeScope); public static final Setting SETTING_HTTP_COMPRESSION = From 56d196ef2b6a6dc96dc9adb7f7a80422f907122f Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 16 May 2018 16:26:23 -0600 Subject: [PATCH 02/11] Remove need for synchronized --- .../pipelining/HttpPipeliningHandler.java | 29 +++++++++---------- .../Netty4HttpPipeliningHandlerTests.java | 21 +++++++++----- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java index 20e6699d612c2..150f9ac340ce2 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java @@ -68,24 +68,21 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw @Override public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { if (msg instanceof HttpPipelinedResponse) { - synchronized (aggregator) { - HttpPipelinedResponse response = (HttpPipelinedResponse) msg; - boolean success = false; - try { - ArrayList> responsesToWrite = aggregator.write(response); - success = true; - for (HttpPipelinedResponse responseToWrite : responsesToWrite) { - ctx.write(responseToWrite.getResponse(), responseToWrite.getListener()); - } - } catch (IllegalStateException e) { - Netty4Utils.closeChannels(Collections.singletonList(ctx.channel())); - } finally { - if (success == false) { - response.getListener().setFailure(new ClosedChannelException()); - } + HttpPipelinedResponse response = (HttpPipelinedResponse) msg; + boolean success = false; + try { + ArrayList> responsesToWrite = aggregator.write(response); + success = true; + for (HttpPipelinedResponse responseToWrite : responsesToWrite) { + 
ctx.write(responseToWrite.getResponse(), responseToWrite.getListener()); + } + } catch (IllegalStateException e) { + Netty4Utils.closeChannels(Collections.singletonList(ctx.channel())); + } finally { + if (success == false) { + response.getListener().setFailure(new ClosedChannelException()); } } - } else { ctx.write(msg, promise); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java index d3b59c35c2163..6d03077f25441 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java @@ -64,7 +64,8 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase { - private final ExecutorService executorService = Executors.newFixedThreadPool(randomIntBetween(4, 8)); + private final ExecutorService handlerService = Executors.newFixedThreadPool(randomIntBetween(4, 8)); + private final ExecutorService eventLoopService = Executors.newFixedThreadPool(1); private final Map waitingRequests = new ConcurrentHashMap<>(); private final Map finishingRequests = new ConcurrentHashMap<>(); @@ -81,9 +82,13 @@ private CountDownLatch finishRequest(String url) { } private void shutdownExecutorService() throws InterruptedException { - if (!executorService.isShutdown()) { - executorService.shutdown(); - executorService.awaitTermination(10, TimeUnit.SECONDS); + if (!handlerService.isShutdown()) { + handlerService.shutdown(); + handlerService.awaitTermination(10, TimeUnit.SECONDS); + } + if (!eventLoopService.isShutdown()) { + eventLoopService.shutdown(); + eventLoopService.awaitTermination(10, TimeUnit.SECONDS); } } @@ -285,12 +290,14 @@ protected void channelRead0(final ChannelHandlerContext ctx, HttpPipelinedReques final CountDownLatch finishingLatch = new CountDownLatch(1); finishingRequests.put(uri, finishingLatch); - executorService.submit(() -> { + handlerService.submit(() -> { try { waitingLatch.await(1000, TimeUnit.SECONDS); final ChannelPromise promise = ctx.newPromise(); - ctx.write(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse, promise), promise); - finishingLatch.countDown(); + eventLoopService.submit(() -> { + ctx.write(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse, promise), promise); + finishingLatch.countDown(); + }); } catch (InterruptedException e) { fail(e.toString()); } From 5a494bca4b965c09ddea7c445326917ac7bbe8ec Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 16 May 2018 17:02:55 -0600 Subject: [PATCH 03/11] Work on nio pipelining --- .../netty4/Netty4HttpServerTransport.java | 7 +- .../pipelining/HttpPipeliningHandler.java | 1 + .../http/nio/HttpReadWriteHandler.java | 20 +++- .../http/nio/HttpWriteOperation.java | 8 +- .../elasticsearch/http/nio/NettyListener.java | 2 +- .../http/nio/NioHttpChannel.java | 11 +- .../http/nio/NioHttpServerTransport.java | 15 ++- .../pipelining/NioHttpPipeliningHandler.java | 110 ++++++++++++++++++ .../http/nio/HttpReadWriteHandlerTests.java | 12 +- .../http/HttpHandlingSettings.java | 9 +- 10 files changed, 172 insertions(+), 23 deletions(-) create mode 100644 plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java diff --git 
a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index bf8879b6064b5..9b26527dd489c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -201,6 +201,7 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic this.maxChunkSize = SETTING_HTTP_MAX_CHUNK_SIZE.get(settings); this.maxHeaderSize = SETTING_HTTP_MAX_HEADER_SIZE.get(settings); this.maxInitialLineLength = SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.get(settings); + this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); this.httpHandlingSettings = new HttpHandlingSettings(Math.toIntExact(maxContentLength.getBytes()), Math.toIntExact(maxChunkSize.getBytes()), Math.toIntExact(maxHeaderSize.getBytes()), @@ -208,7 +209,8 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic SETTING_HTTP_RESET_COOKIES.get(settings), SETTING_HTTP_COMPRESSION.get(settings), SETTING_HTTP_COMPRESSION_LEVEL.get(settings), - SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings)); + SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings), + pipeliningMaxEvents); this.maxCompositeBufferComponents = SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings); this.workerCount = SETTING_HTTP_WORKER_COUNT.get(settings); @@ -223,12 +225,11 @@ public Netty4HttpServerTransport(Settings settings, NetworkService networkServic ByteSizeValue receivePredictor = SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE.get(settings); recvByteBufAllocator = new FixedRecvByteBufAllocator(receivePredictor.bytesAsInt()); - this.pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); this.corsConfig = buildCorsConfig(settings); logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " + "receive_predictor[{}], max_composite_buffer_components[{}], pipelining_max_events[{}]", - maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength, receivePredictor, maxCompositeBufferComponents, + maxChunkSize, maxHeaderSize, maxInitialLineLength, maxContentLength, receivePredictor, maxCompositeBufferComponents, pipeliningMaxEvents); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java index 150f9ac340ce2..794616372f1fd 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java @@ -77,6 +77,7 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann ctx.write(responseToWrite.getResponse(), responseToWrite.getListener()); } } catch (IllegalStateException e) { + ctx.channel().close(); Netty4Utils.closeChannels(Collections.singletonList(ctx.channel())); } finally { if (success == false) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java index f1d18ddacbd13..396a666318bdb 100644 --- 
a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java @@ -35,6 +35,9 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpHandlingSettings; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedResponse; +import org.elasticsearch.http.nio.pipelining.NioHttpPipeliningHandler; import org.elasticsearch.nio.FlushOperation; import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.nio.ReadWriteHandler; @@ -77,6 +80,7 @@ public class HttpReadWriteHandler implements ReadWriteHandler { if (settings.isCompression()) { handlers.add(new HttpContentCompressor(settings.getCompressionLevel())); } + handlers.add(new NioHttpPipeliningHandler(transport.getLogger(), settings.getPipeliningMaxEvents())); adaptor = new NettyAdaptor(handlers.toArray(new ChannelHandler[0])); adaptor.addCloseListener((v, e) -> nioChannel.close()); @@ -94,10 +98,11 @@ public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException { } @Override + @SuppressWarnings("unchecked") public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer listener) { - assert message instanceof FullHttpResponse : "This channel only supports messages that are of type: " + FullHttpResponse.class - + ". Found type: " + message.getClass() + "."; - return new HttpWriteOperation(context, (FullHttpResponse) message, listener); + assert message instanceof HttpPipelinedResponse : "This channel only supports messages that are of type: " + + HttpPipelinedResponse.class + ". 
Found type: " + message.getClass() + "."; + return new HttpWriteOperation(context, (HttpPipelinedResponse>) message, listener); } @Override @@ -125,8 +130,10 @@ public void close() throws IOException { } } + @SuppressWarnings("unchecked") private void handleRequest(Object msg) { - final FullHttpRequest request = (FullHttpRequest) msg; + final HttpPipelinedRequest pipelinedRequest = (HttpPipelinedRequest) msg; + FullHttpRequest request = pipelinedRequest.getRequest(); final FullHttpRequest copiedRequest = new DefaultFullHttpRequest( @@ -170,8 +177,9 @@ private void handleRequest(Object msg) { final NioHttpChannel channel; { NioHttpChannel innerChannel; + int sequence = pipelinedRequest.getSequence(); try { - innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), httpRequest, settings, threadContext); + innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), httpRequest, sequence, settings, threadContext); } catch (final IllegalArgumentException e) { if (badRequestCause == null) { badRequestCause = e; @@ -184,7 +192,7 @@ private void handleRequest(Object msg) { Collections.emptyMap(), // we are going to dispatch the request as a bad request, drop all parameters copiedRequest.uri(), copiedRequest); - innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), innerRequest, settings, threadContext); + innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), innerRequest, sequence, settings, threadContext); } channel = innerChannel; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java index c838ae85e9d40..78d9fcfb59b66 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java @@ -20,6 +20,7 @@ package org.elasticsearch.http.nio; import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.nio.WriteOperation; @@ -28,10 +29,11 @@ public class HttpWriteOperation implements WriteOperation { private final SocketChannelContext channelContext; - private final FullHttpResponse response; + private final HttpPipelinedResponse> response; private final BiConsumer listener; - HttpWriteOperation(SocketChannelContext channelContext, FullHttpResponse response, BiConsumer listener) { + HttpWriteOperation(SocketChannelContext channelContext, HttpPipelinedResponse> response, + BiConsumer listener) { this.channelContext = channelContext; this.response = response; this.listener = listener; @@ -48,7 +50,7 @@ public SocketChannelContext getChannel() { } @Override - public FullHttpResponse getObject() { + public HttpPipelinedResponse> getObject() { return response; } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java index e806b0d23ce3a..02bcca7ba6f77 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java @@ -40,7 +40,7 @@ public class NettyListener implements BiConsumer, ChannelPromis private final ChannelPromise promise; - NettyListener(ChannelPromise promise) { + public NettyListener(ChannelPromise promise) { 
this.promise = promise; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index 672c6d5abad0e..b141f3fe4508b 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpHandlingSettings; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.RestResponse; @@ -52,20 +53,23 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.BiConsumer; public class NioHttpChannel extends AbstractRestChannel { private final BigArrays bigArrays; + private final int sequence; private final ThreadContext threadContext; private final FullHttpRequest nettyRequest; private final NioSocketChannel nioChannel; private final boolean resetCookies; - NioHttpChannel(NioSocketChannel nioChannel, BigArrays bigArrays, NioHttpRequest request, + NioHttpChannel(NioSocketChannel nioChannel, BigArrays bigArrays, NioHttpRequest request, int sequence, HttpHandlingSettings settings, ThreadContext threadContext) { super(request, settings.getDetailedErrorsEnabled()); this.nioChannel = nioChannel; this.bigArrays = bigArrays; + this.sequence = sequence; this.threadContext = threadContext; this.nettyRequest = request.getRequest(); this.resetCookies = settings.isResetCookies(); @@ -117,9 +121,10 @@ public void sendResponse(RestResponse response) { toClose.add(nioChannel::close); } - nioChannel.getContext().sendMessage(resp, (aVoid, throwable) -> { + BiConsumer listener = (aVoid, throwable) -> { Releasables.close(toClose); - }); + }; + nioChannel.getContext().sendMessage(new HttpPipelinedResponse<>(sequence, resp, listener), listener); success = true; } finally { if (success == false) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index bdbee715bd0cf..8757a12e17ebe 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -20,6 +20,7 @@ package org.elasticsearch.http.nio; import io.netty.handler.timeout.ReadTimeoutException; +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; @@ -84,6 +85,7 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_REUSE_ADDRESS; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_TCP_SEND_BUFFER_SIZE; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS; public class NioHttpServerTransport extends AbstractHttpServerTransport { @@ -124,6 +126,7 @@ public NioHttpServerTransport(Settings settings, NetworkService networkService, ByteSizeValue maxChunkSize = SETTING_HTTP_MAX_CHUNK_SIZE.get(settings); ByteSizeValue 
maxHeaderSize = SETTING_HTTP_MAX_HEADER_SIZE.get(settings); ByteSizeValue maxInitialLineLength = SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.get(settings); + int pipeliningMaxEvents = SETTING_PIPELINING_MAX_EVENTS.get(settings); this.httpHandlingSettings = new HttpHandlingSettings(Math.toIntExact(maxContentLength.getBytes()), Math.toIntExact(maxChunkSize.getBytes()), Math.toIntExact(maxHeaderSize.getBytes()), @@ -131,7 +134,8 @@ public NioHttpServerTransport(Settings settings, NetworkService networkService, SETTING_HTTP_RESET_COOKIES.get(settings), SETTING_HTTP_COMPRESSION.get(settings), SETTING_HTTP_COMPRESSION_LEVEL.get(settings), - SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings)); + SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings), + pipeliningMaxEvents); this.tcpNoDelay = SETTING_HTTP_TCP_NO_DELAY.get(settings); this.tcpKeepAlive = SETTING_HTTP_TCP_KEEP_ALIVE.get(settings); @@ -140,14 +144,19 @@ public NioHttpServerTransport(Settings settings, NetworkService networkService, this.tcpReceiveBufferSize = Math.toIntExact(SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE.get(settings).getBytes()); - logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}]", - maxChunkSize, maxHeaderSize, maxInitialLineLength, maxContentLength); + logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}]," + + " pipelining_max_events[{}]", + maxChunkSize, maxHeaderSize, maxInitialLineLength, maxContentLength, pipeliningMaxEvents); } BigArrays getBigArrays() { return bigArrays; } + public Logger getLogger() { + return logger; + } + @Override protected void doStart() { boolean success = false; diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java new file mode 100644 index 0000000000000..751061535408f --- /dev/null +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.nio.pipelining; + +import io.netty.channel.ChannelDuplexHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPromise; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.LastHttpContent; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.http.HttpPipelinedResponse; +import org.elasticsearch.http.HttpPipeliningAggregator; +import org.elasticsearch.http.nio.NettyListener; + +import java.nio.channels.ClosedChannelException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiConsumer; + +/** + * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their corresponding requests. + */ +public class NioHttpPipeliningHandler extends ChannelDuplexHandler { + + private final Logger logger; + private final HttpPipeliningAggregator> aggregator; + + /** + * Construct a new pipelining handler; this handler should be used downstream of HTTP decoding/aggregation. + * + * @param logger for logging unexpected errors + * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel connection; this is + * required as events cannot queue up indefinitely + */ + public NioHttpPipeliningHandler(Logger logger, final int maxEventsHeld) { + this.logger = logger; + this.aggregator = new HttpPipeliningAggregator<>(maxEventsHeld); + } + + @Override + public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception { + if (msg instanceof LastHttpContent) { + HttpPipelinedRequest pipelinedRequest = aggregator.read(((LastHttpContent) msg).retain()); + ctx.fireChannelRead(pipelinedRequest); + } else { + ctx.fireChannelRead(msg); + } + } + + @Override + @SuppressWarnings("unchecked") + public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { + if (msg instanceof HttpPipelinedResponse) { + HttpPipelinedResponse> response = + (HttpPipelinedResponse>) msg; + boolean success = false; + try { + List>> responsesToWrite = aggregator.write(response); + success = true; + for (HttpPipelinedResponse> responseToWrite : responsesToWrite) { +// ctx.write(responseToWrite.getResponse(), new NettyListener(responseToWrite.getListener())); + } + } catch (IllegalStateException e) { + ctx.channel().close(); + } finally { + if (success == false) { + response.getListener().accept(null, new ClosedChannelException()); + } + } + } else { + ctx.write(msg, promise); + } + } + + @Override + public void close(ChannelHandlerContext ctx, ChannelPromise promise) { + List>> inflightResponses = + aggregator.removeAllInflightResponses(); + + if (inflightResponses.isEmpty() == false) { + ClosedChannelException closedChannelException = new ClosedChannelException(); + for (HttpPipelinedResponse> inflightResponse : inflightResponses) { + try { + inflightResponse.getListener().accept(null, closedChannelException); + } catch (RuntimeException e) { + logger.error("unexpected error while releasing pipelined http responses", e); + } + } + } + ctx.close(promise); + } +} diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java index dce8319d2fc82..f838db384ab44 100644 --- 
a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpHandlingSettings; +import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.nio.FlushOperation; import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.nio.NioSocketChannel; @@ -61,6 +62,7 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_RESET_COOKIES; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -91,7 +93,8 @@ public void setMocks() { SETTING_HTTP_RESET_COOKIES.getDefault(settings), SETTING_HTTP_COMPRESSION.getDefault(settings), SETTING_HTTP_COMPRESSION_LEVEL.getDefault(settings), - SETTING_HTTP_DETAILED_ERRORS_ENABLED.getDefault(settings)); + SETTING_HTTP_DETAILED_ERRORS_ENABLED.getDefault(settings), + SETTING_PIPELINING_MAX_EVENTS.getDefault(settings)); ThreadContext threadContext = new ThreadContext(settings); nioSocketChannel = mock(NioSocketChannel.class); handler = new HttpReadWriteHandler(nioSocketChannel, transport, httpHandlingSettings, NamedXContentRegistry.EMPTY, threadContext); @@ -148,7 +151,8 @@ public void testDecodeHttpRequestContentLengthToLongGeneratesOutboundMessage() t handler.consumeReads(toChannelBuffer(buf)); - verifyZeroInteractions(transport); + verify(transport, times(0)).dispatchBadRequest(any(), any(), any()); + verify(transport, times(0)).dispatchRequest(any(), any()); List flushOperations = handler.pollFlushOperations(); assertFalse(flushOperations.isEmpty()); @@ -169,9 +173,11 @@ public void testEncodeHttpResponse() throws IOException { prepareHandlerForResponse(handler); FullHttpResponse fullHttpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK); + HttpPipelinedResponse> pipelinedResponse = + new HttpPipelinedResponse<>(0, fullHttpResponse, mock(BiConsumer.class)); SocketChannelContext context = mock(SocketChannelContext.class); - HttpWriteOperation writeOperation = new HttpWriteOperation(context, fullHttpResponse, mock(BiConsumer.class)); + HttpWriteOperation writeOperation = new HttpWriteOperation(context, pipelinedResponse, mock(BiConsumer.class)); List flushOperations = handler.writeToBytes(writeOperation); HttpResponse response = responseDecoder.decode(Unpooled.wrappedBuffer(flushOperations.get(0).getBuffersToWrite())); diff --git a/server/src/main/java/org/elasticsearch/http/HttpHandlingSettings.java b/server/src/main/java/org/elasticsearch/http/HttpHandlingSettings.java index f86049292f3fd..df038e8303edb 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpHandlingSettings.java +++ b/server/src/main/java/org/elasticsearch/http/HttpHandlingSettings.java @@ -29,9 +29,11 @@ public class HttpHandlingSettings { private final boolean compression; private final int compressionLevel; private final boolean detailedErrorsEnabled; + private final int pipeliningMaxEvents; public HttpHandlingSettings(int maxContentLength, int maxChunkSize, 
int maxHeaderSize, int maxInitialLineLength, - boolean resetCookies, boolean compression, int compressionLevel, boolean detailedErrorsEnabled) { + boolean resetCookies, boolean compression, int compressionLevel, boolean detailedErrorsEnabled, + int pipeliningMaxEvents) { this.maxContentLength = maxContentLength; this.maxChunkSize = maxChunkSize; this.maxHeaderSize = maxHeaderSize; @@ -40,6 +42,7 @@ public HttpHandlingSettings(int maxContentLength, int maxChunkSize, int maxHeade this.compression = compression; this.compressionLevel = compressionLevel; this.detailedErrorsEnabled = detailedErrorsEnabled; + this.pipeliningMaxEvents = pipeliningMaxEvents; } public int getMaxContentLength() { @@ -73,4 +76,8 @@ public int getCompressionLevel() { public boolean getDetailedErrorsEnabled() { return detailedErrorsEnabled; } + + public int getPipeliningMaxEvents() { + return pipeliningMaxEvents; + } } From 09b86a37c741efda71482e9b09188e4f186d7722 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 May 2018 10:08:02 -0600 Subject: [PATCH 04/11] IMplement nio pipelining --- .../http/netty4/Netty4HttpChannel.java | 2 +- .../pipelining/HttpPipeliningHandler.java | 18 ++++++----- .../http/netty4/Netty4HttpChannelTests.java | 4 +-- .../Netty4HttpServerPipeliningTests.java | 2 +- .../Netty4HttpPipeliningHandlerTests.java | 4 +-- .../http/nio/HttpReadWriteHandler.java | 2 +- .../http/nio/HttpWriteOperation.java | 6 ++-- .../elasticsearch/http/nio/NettyAdaptor.java | 20 ++----------- .../elasticsearch/http/nio/NettyListener.java | 30 +++++++++++++++++-- .../http/nio/NioHttpChannel.java | 6 ++-- .../pipelining/NioHttpPipeliningHandler.java | 26 ++++++++-------- .../http/nio/HttpReadWriteHandlerTests.java | 3 +- .../http/HttpPipelinedResponse.java | 12 ++------ .../http/HttpPipeliningAggregator.java | 18 ++++++----- .../test/InternalTestCluster.java | 1 - 15 files changed, 78 insertions(+), 76 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index e5c135b6e5aeb..2c8e11cf94f4d 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -135,7 +135,7 @@ public void sendResponse(RestResponse response) { promise.addListener(ChannelFutureListener.CLOSE); } - HttpPipelinedResponse newResponse = new HttpPipelinedResponse<>(sequence, resp, promise); + HttpPipelinedResponse newResponse = new HttpPipelinedResponse<>(sequence, resp); channel.writeAndFlush(newResponse, promise); releaseContent = false; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java index 794616372f1fd..239ebad734c39 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java @@ -25,6 +25,7 @@ import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.LastHttpContent; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.http.HttpPipelinedResponse; import 
org.elasticsearch.http.HttpPipeliningAggregator; @@ -66,22 +67,23 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw } @Override + @SuppressWarnings("unchecked") public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { if (msg instanceof HttpPipelinedResponse) { - HttpPipelinedResponse response = (HttpPipelinedResponse) msg; + HttpPipelinedResponse response = (HttpPipelinedResponse) msg; boolean success = false; try { - ArrayList> responsesToWrite = aggregator.write(response); + List, ChannelPromise>> readyResponses = aggregator.write(response, promise); success = true; - for (HttpPipelinedResponse responseToWrite : responsesToWrite) { - ctx.write(responseToWrite.getResponse(), responseToWrite.getListener()); + for (Tuple, ChannelPromise> readyResponse : readyResponses) { + ctx.write(readyResponse.v1().getResponse(), readyResponse.v2()); } } catch (IllegalStateException e) { ctx.channel().close(); Netty4Utils.closeChannels(Collections.singletonList(ctx.channel())); } finally { if (success == false) { - response.getListener().setFailure(new ClosedChannelException()); + promise.setFailure(new ClosedChannelException()); } } } else { @@ -91,13 +93,13 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann @Override public void close(ChannelHandlerContext ctx, ChannelPromise promise) { - List> inflightResponses = aggregator.removeAllInflightResponses(); + List, ChannelPromise>> inflightResponses = aggregator.removeAllInflightResponses(); if (inflightResponses.isEmpty() == false) { ClosedChannelException closedChannelException = new ClosedChannelException(); - for (HttpPipelinedResponse inflightResponse : inflightResponses) { + for (Tuple, ChannelPromise> inflightResponse : inflightResponses) { try { - inflightResponse.getListener().setFailure(closedChannelException); + inflightResponse.v2().setFailure(closedChannelException); } catch (RuntimeException e) { logger.error("unexpected error while releasing pipelined http responses", e); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java index d6ce4a03f0579..c889618516a87 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ -229,7 +229,7 @@ public void testHeadersSet() { // inspect what was written List writtenObjects = writeCapturingChannel.getWrittenObjects(); assertThat(writtenObjects.size(), is(1)); - HttpResponse response = ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); + HttpResponse response = ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); assertThat(response.headers().get("non-existent-header"), nullValue()); assertThat(response.headers().get(customHeader), equalTo(customHeaderValue)); assertThat(response.headers().get(HttpHeaderNames.CONTENT_LENGTH), equalTo(Integer.toString(resp.content().length()))); @@ -347,7 +347,7 @@ private FullHttpResponse executeRequest(final Settings settings, final String or // get the response List writtenObjects = writeCapturingChannel.getWrittenObjects(); assertThat(writtenObjects.size(), is(1)); - return ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); + return ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); } } 
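
The ordering contract exercised above is the one HttpPipeliningAggregator enforces: a response is released only after every response with a lower sequence number has been released, and holding more than http.pipelining.max_events outstanding responses aborts the connection. A minimal stand-alone sketch of that rule follows; SequencedResponses and offer() are illustrative names, not types from this patch.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.PriorityQueue;

    // Minimal sketch: responses may be offered in any order, but one is only
    // released once every response with a lower sequence number has been
    // released. Exceeding maxEventsHeld aborts, mirroring the aggregator's
    // IllegalStateException. Illustrative names only.
    final class SequencedResponses<T> {

        private final PriorityQueue<Entry<T>> holdingQueue =
            new PriorityQueue<>(Comparator.comparingInt((Entry<T> e) -> e.sequence));
        private final int maxEventsHeld;
        private int writeSequence = 0;

        SequencedResponses(int maxEventsHeld) {
            this.maxEventsHeld = maxEventsHeld;
        }

        // Offer an out-of-order response; returns every response now ready to write.
        List<T> offer(int sequence, T response) {
            if (holdingQueue.size() >= maxEventsHeld) {
                throw new IllegalStateException("too many pipelined responses held [" + (holdingQueue.size() + 1) + "]");
            }
            holdingQueue.add(new Entry<>(sequence, response));
            List<T> ready = new ArrayList<>();
            while (holdingQueue.isEmpty() == false && holdingQueue.peek().sequence == writeSequence) {
                ready.add(holdingQueue.poll().response);
                writeSequence++;
            }
            return ready;
        }

        private static final class Entry<T> {
            final int sequence;
            final T response;

            Entry(int sequence, T response) {
                this.sequence = sequence;
                this.response = response;
            }
        }
    }

The real aggregator has the same shape, a priority queue ordered by sequence number plus a writeSequence counter, and additionally carries each response's promise or listener alongside the queued entry.
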
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index e51ff9ed5ae8b..7652dfe38faab 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -204,7 +204,7 @@ public void run() { } final ChannelPromise promise = ctx.newPromise(); - ctx.writeAndFlush(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse, promise), promise); + ctx.writeAndFlush(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse), promise); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java index 6d03077f25441..aea8baffeb9f7 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java @@ -230,7 +230,7 @@ public void testPipeliningRequestsAreReleased() throws InterruptedException { ChannelPromise promise = embeddedChannel.newPromise(); promises.add(promise); int sequence = requests.get(i).getSequence(); - HttpPipelinedResponse resp = new HttpPipelinedResponse<>(sequence, httpResponse, promise); + HttpPipelinedResponse resp = new HttpPipelinedResponse<>(sequence, httpResponse); embeddedChannel.writeAndFlush(resp, promise); } @@ -295,7 +295,7 @@ protected void channelRead0(final ChannelHandlerContext ctx, HttpPipelinedReques waitingLatch.await(1000, TimeUnit.SECONDS); final ChannelPromise promise = ctx.newPromise(); eventLoopService.submit(() -> { - ctx.write(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse, promise), promise); + ctx.write(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse), promise); finishingLatch.countDown(); }); } catch (InterruptedException e) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java index 396a666318bdb..f0407d12687e3 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java @@ -102,7 +102,7 @@ public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException { public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer listener) { assert message instanceof HttpPipelinedResponse : "This channel only supports messages that are of type: " + HttpPipelinedResponse.class + ". 
Found type: " + message.getClass() + "."; - return new HttpWriteOperation(context, (HttpPipelinedResponse>) message, listener); + return new HttpWriteOperation(context, (HttpPipelinedResponse) message, listener); } @Override diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java index 78d9fcfb59b66..485710165f421 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java @@ -29,10 +29,10 @@ public class HttpWriteOperation implements WriteOperation { private final SocketChannelContext channelContext; - private final HttpPipelinedResponse> response; + private final HttpPipelinedResponse response; private final BiConsumer listener; - HttpWriteOperation(SocketChannelContext channelContext, HttpPipelinedResponse> response, + HttpWriteOperation(SocketChannelContext channelContext, HttpPipelinedResponse response, BiConsumer listener) { this.channelContext = channelContext; this.response = response; @@ -50,7 +50,7 @@ public SocketChannelContext getChannel() { } @Override - public HttpPipelinedResponse> getObject() { + public HttpPipelinedResponse getObject() { return response; } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java index 3344a31264121..cf8c92bff905c 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java @@ -53,12 +53,7 @@ public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) try { ByteBuf message = (ByteBuf) msg; promise.addListener((f) -> message.release()); - NettyListener listener; - if (promise instanceof NettyListener) { - listener = (NettyListener) promise; - } else { - listener = new NettyListener(promise); - } + NettyListener listener = NettyListener.fromChannelPromise(promise); flushOperations.add(new FlushOperation(message.nioBuffers(), listener)); } catch (Exception e) { promise.setFailure(e); @@ -107,18 +102,7 @@ public Object pollInboundMessage() { } public void write(WriteOperation writeOperation) { - ChannelPromise channelPromise = nettyChannel.newPromise(); - channelPromise.addListener(f -> { - BiConsumer consumer = writeOperation.getListener(); - if (f.cause() == null) { - consumer.accept(null, null); - } else { - ExceptionsHelper.dieOnError(f.cause()); - consumer.accept(null, f.cause()); - } - }); - - nettyChannel.writeAndFlush(writeOperation.getObject(), new NettyListener(channelPromise)); + nettyChannel.writeAndFlush(writeOperation.getObject(), NettyListener.fromBiConsumer(writeOperation.getListener(), nettyChannel)); } public FlushOperation pollOutboundOperation() { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java index 02bcca7ba6f77..b907c0f2bc6f6 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyListener.java @@ -23,7 +23,7 @@ import io.netty.channel.ChannelPromise; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; -import 
org.elasticsearch.action.ActionListener; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.util.concurrent.FutureUtils; import java.util.concurrent.ExecutionException; @@ -40,7 +40,7 @@ public class NettyListener implements BiConsumer, ChannelPromis private final ChannelPromise promise; - public NettyListener(ChannelPromise promise) { + private NettyListener(ChannelPromise promise) { this.promise = promise; } @@ -211,4 +211,30 @@ public boolean isVoid() { public ChannelPromise unvoid() { return promise.unvoid(); } + + public static NettyListener fromBiConsumer(BiConsumer biConsumer, Channel channel) { + if (biConsumer instanceof NettyListener) { + return (NettyListener) biConsumer; + } else { + ChannelPromise channelPromise = channel.newPromise(); + channelPromise.addListener(f -> { + if (f.cause() == null) { + biConsumer.accept(null, null); + } else { + ExceptionsHelper.dieOnError(f.cause()); + biConsumer.accept(null, f.cause()); + } + }); + + return new NettyListener(channelPromise); + } + } + + public static NettyListener fromChannelPromise(ChannelPromise channelPromise) { + if (channelPromise instanceof NettyListener) { + return (NettyListener) channelPromise; + } else { + return new NettyListener(channelPromise); + } + } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index b141f3fe4508b..75cb7d2154c1d 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -121,10 +121,8 @@ public void sendResponse(RestResponse response) { toClose.add(nioChannel::close); } - BiConsumer listener = (aVoid, throwable) -> { - Releasables.close(toClose); - }; - nioChannel.getContext().sendMessage(new HttpPipelinedResponse<>(sequence, resp, listener), listener); + BiConsumer listener = (aVoid, throwable) -> Releasables.close(toClose); + nioChannel.getContext().sendMessage(new HttpPipelinedResponse<>(sequence, resp), listener); success = true; } finally { if (success == false) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java index 751061535408f..9b3210dd523c2 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java @@ -25,15 +25,14 @@ import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.LastHttpContent; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpPipeliningAggregator; import org.elasticsearch.http.nio.NettyListener; import java.nio.channels.ClosedChannelException; -import java.util.ArrayList; import java.util.List; -import java.util.function.BiConsumer; /** * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their corresponding requests. 
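
The NettyListener changes above fold the promise/listener duality into one object: a write can arrive carrying either a Netty ChannelPromise or a plain BiConsumer<Void, Throwable> callback, and each must be able to notify the other side on completion. A simplified illustration of the adaptation that fromBiConsumer performs is sketched below; PromiseAdapters is a hypothetical helper, not a class in this patch, and the real code additionally calls ExceptionsHelper.dieOnError on failures.

    import io.netty.channel.Channel;
    import io.netty.channel.ChannelPromise;

    import java.util.function.BiConsumer;

    // Simplified illustration: wrap a completion callback in a ChannelPromise so the
    // same object can be handed both to Netty writes and to the nio WriteOperation
    // listener. Hypothetical helper, not part of the patch.
    final class PromiseAdapters {

        static ChannelPromise adapt(BiConsumer<Void, Throwable> listener, Channel channel) {
            ChannelPromise promise = channel.newPromise();
            promise.addListener(f -> {
                if (f.cause() == null) {
                    listener.accept(null, null);       // write completed successfully
                } else {
                    listener.accept(null, f.cause());  // write failed; propagate the cause
                }
            });
            return promise;
        }
    }
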
@@ -41,7 +40,7 @@ public class NioHttpPipeliningHandler extends ChannelDuplexHandler { private final Logger logger; - private final HttpPipeliningAggregator> aggregator; + private final HttpPipeliningAggregator aggregator; /** * Construct a new pipelining handler; this handler should be used downstream of HTTP decoding/aggregation. @@ -67,22 +66,22 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw @Override @SuppressWarnings("unchecked") - public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { + public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) { if (msg instanceof HttpPipelinedResponse) { - HttpPipelinedResponse> response = - (HttpPipelinedResponse>) msg; + HttpPipelinedResponse response = (HttpPipelinedResponse) msg; boolean success = false; try { - List>> responsesToWrite = aggregator.write(response); + NettyListener listener = NettyListener.fromChannelPromise(promise); + List, NettyListener>> readyResponses = aggregator.write(response, listener); success = true; - for (HttpPipelinedResponse> responseToWrite : responsesToWrite) { -// ctx.write(responseToWrite.getResponse(), new NettyListener(responseToWrite.getListener())); + for (Tuple, NettyListener> responseToWrite : readyResponses) { + ctx.write(responseToWrite.v1().getResponse(), responseToWrite.v2()); } } catch (IllegalStateException e) { ctx.channel().close(); } finally { if (success == false) { - response.getListener().accept(null, new ClosedChannelException()); + promise.setFailure(new ClosedChannelException()); } } } else { @@ -92,14 +91,13 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann @Override public void close(ChannelHandlerContext ctx, ChannelPromise promise) { - List>> inflightResponses = - aggregator.removeAllInflightResponses(); + List, NettyListener>> inflightResponses = aggregator.removeAllInflightResponses(); if (inflightResponses.isEmpty() == false) { ClosedChannelException closedChannelException = new ClosedChannelException(); - for (HttpPipelinedResponse> inflightResponse : inflightResponses) { + for (Tuple, NettyListener> inflightResponse : inflightResponses) { try { - inflightResponse.getListener().accept(null, closedChannelException); + inflightResponse.v2().setFailure(closedChannelException); } catch (RuntimeException e) { logger.error("unexpected error while releasing pipelined http responses", e); } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java index f838db384ab44..416f3cb791f54 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java @@ -173,8 +173,7 @@ public void testEncodeHttpResponse() throws IOException { prepareHandlerForResponse(handler); FullHttpResponse fullHttpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK); - HttpPipelinedResponse> pipelinedResponse = - new HttpPipelinedResponse<>(0, fullHttpResponse, mock(BiConsumer.class)); + HttpPipelinedResponse pipelinedResponse = new HttpPipelinedResponse<>(0, fullHttpResponse); SocketChannelContext context = mock(SocketChannelContext.class); HttpWriteOperation writeOperation = new HttpWriteOperation(context, pipelinedResponse, 
mock(BiConsumer.class)); diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java b/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java index add5e3016b321..cd1082f080747 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java +++ b/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java @@ -18,16 +18,14 @@ */ package org.elasticsearch.http; -public class HttpPipelinedResponse implements Comparable> { +public class HttpPipelinedResponse implements Comparable> { private final int sequence; private final Response response; - private final Listener listener; - public HttpPipelinedResponse(int sequence, Response response, Listener listener) { + public HttpPipelinedResponse(int sequence, Response response) { this.sequence = sequence; this.response = response; - this.listener = listener; } public int getSequence() { @@ -38,12 +36,8 @@ public Response getResponse() { return response; } - public Listener getListener() { - return listener; - } - @Override - public int compareTo(HttpPipelinedResponse o) { + public int compareTo(HttpPipelinedResponse o) { return Integer.compare(sequence, o.sequence); } } diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java b/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java index ff250d7435d79..91e44bff261f7 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java +++ b/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.http; +import org.elasticsearch.common.collect.Tuple; + import java.util.ArrayList; import java.util.List; import java.util.PriorityQueue; @@ -25,7 +27,7 @@ public class HttpPipeliningAggregator { private final int maxEventsHeld; - private final PriorityQueue> outboundHoldingQueue; + private final PriorityQueue, Listener>> outboundHoldingQueue; /* * The current read and write sequence numbers. Read sequence numbers are attached to requests in the order they are read from the * channel, and then transferred to responses. A response is not written to the channel context until its sequence number matches the @@ -43,18 +45,18 @@ public HttpPipelinedRequest read(final Request request) { return new HttpPipelinedRequest<>(readSequence++, request); } - public ArrayList> write(final HttpPipelinedResponse response) { + public List, Listener>> write(final HttpPipelinedResponse response, Listener listener) { if (outboundHoldingQueue.size() < maxEventsHeld) { - ArrayList> readyResponses = new ArrayList<>(); - outboundHoldingQueue.add(response); + ArrayList, Listener>> readyResponses = new ArrayList<>(); + outboundHoldingQueue.add(new Tuple<>(response, listener)); while (!outboundHoldingQueue.isEmpty()) { /* * Since the response with the lowest sequence number is the top of the priority queue, we know if its sequence * number does not match the current write sequence number then we have not processed all preceding responses yet. 
*/ - final HttpPipelinedResponse top = outboundHoldingQueue.peek(); + final Tuple, Listener> top = outboundHoldingQueue.peek(); - if (top.getSequence() != writeSequence) { + if (top.v1().getSequence() != writeSequence) { break; } outboundHoldingQueue.poll(); @@ -70,8 +72,8 @@ public ArrayList> write(final HttpPipe } } - public List> removeAllInflightResponses() { - ArrayList> responses = new ArrayList<>(outboundHoldingQueue); + public List, Listener>> removeAllInflightResponses() { + ArrayList, Listener>> responses = new ArrayList<>(outboundHoldingQueue); outboundHoldingQueue.clear(); return responses; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 5099fc0540de2..b945f7d84eb95 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -300,7 +300,6 @@ public InternalTestCluster(long clusterSeed, Path baseDir, builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put(TcpTransport.PORT.getKey(), 0); builder.put("http.port", 0); - builder.put("http.pipelining", enableHttpPipelining); if (Strings.hasLength(System.getProperty("tests.es.logger.level"))) { builder.put("logger.level", System.getProperty("tests.es.logger.level")); } From 30339f9863f23c2d1f5142759eae3b7a20795fb9 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 May 2018 10:45:58 -0600 Subject: [PATCH 05/11] Some reworks --- .../http/netty4/Netty4HttpChannel.java | 4 +- .../pipelining/HttpPipeliningHandler.java | 18 ++++----- .../netty4/pipelining/Netty4HttpResponse.java | 37 +++++++++++++++++++ .../http/netty4/Netty4HttpChannelTests.java | 9 ++--- .../Netty4HttpServerPipeliningTests.java | 4 +- .../http/netty4/Netty4PipeliningIT.java | 10 ----- .../Netty4HttpPipeliningHandlerTests.java | 5 +-- .../http/nio/HttpReadWriteHandler.java | 11 ++---- .../http/nio/HttpWriteOperation.java | 9 ++--- .../http/nio/NioHttpChannel.java | 3 +- .../http/nio/NioHttpResponse.java | 37 +++++++++++++++++++ .../pipelining/NioHttpPipeliningHandler.java | 18 ++++----- .../http/nio/HttpReadWriteHandlerTests.java | 4 +- ...esponse.java => HttpPipelinedMessage.java} | 12 ++---- .../http/HttpPipelinedRequest.java | 11 ++---- .../http/HttpPipeliningAggregator.java | 17 +++++---- 16 files changed, 122 insertions(+), 87 deletions(-) create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java create mode 100644 plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpResponse.java rename server/src/main/java/org/elasticsearch/http/{HttpPipelinedResponse.java => HttpPipelinedMessage.java} (73%) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index 2c8e11cf94f4d..4eee731d2aca9 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -41,8 +41,8 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.HttpPipelinedResponse; import 
org.elasticsearch.http.netty4.cors.Netty4CorsHandler; +import org.elasticsearch.http.netty4.pipelining.Netty4HttpResponse; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; @@ -135,7 +135,7 @@ public void sendResponse(RestResponse response) { promise.addListener(ChannelFutureListener.CLOSE); } - HttpPipelinedResponse newResponse = new HttpPipelinedResponse<>(sequence, resp); + Netty4HttpResponse newResponse = new Netty4HttpResponse(sequence, resp); channel.writeAndFlush(newResponse, promise); releaseContent = false; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java index 239ebad734c39..8d9782d50b4a8 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java @@ -22,17 +22,14 @@ import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; -import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.LastHttpContent; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpPipeliningAggregator; import org.elasticsearch.transport.netty4.Netty4Utils; import java.nio.channels.ClosedChannelException; -import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -42,7 +39,7 @@ public class HttpPipeliningHandler extends ChannelDuplexHandler { private final Logger logger; - private final HttpPipeliningAggregator aggregator; + private final HttpPipeliningAggregator aggregator; /** * Construct a new pipelining handler; this handler should be used downstream of HTTP decoding/aggregation. 
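
When the channel closes, any responses still parked in the aggregator can never be written, so their promises are failed rather than silently dropped; the close() hunk below does exactly this. A compact sketch of that cleanup step, under the assumption that the input list is what removeAllInflightResponses() returns (InflightCleanup and failInflight are illustrative names):

    import io.netty.channel.ChannelPromise;
    import org.elasticsearch.common.collect.Tuple;

    import java.nio.channels.ClosedChannelException;
    import java.util.List;

    // Sketch of the close-time cleanup: every response still held by the aggregator
    // has its promise failed so nothing is left waiting on a channel that will never
    // write again.
    final class InflightCleanup {

        static <R> void failInflight(List<Tuple<R, ChannelPromise>> inflight) {
            if (inflight.isEmpty() == false) {
                ClosedChannelException closed = new ClosedChannelException();
                for (Tuple<R, ChannelPromise> entry : inflight) {
                    entry.v2().setFailure(closed);   // wake up the waiting writer with a failure
                }
            }
        }
    }
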
@@ -67,15 +64,14 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw } @Override - @SuppressWarnings("unchecked") public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception { - if (msg instanceof HttpPipelinedResponse) { - HttpPipelinedResponse response = (HttpPipelinedResponse) msg; + if (msg instanceof Netty4HttpResponse) { + Netty4HttpResponse response = (Netty4HttpResponse) msg; boolean success = false; try { - List, ChannelPromise>> readyResponses = aggregator.write(response, promise); + List> readyResponses = aggregator.write(response, promise); success = true; - for (Tuple, ChannelPromise> readyResponse : readyResponses) { + for (Tuple readyResponse : readyResponses) { ctx.write(readyResponse.v1().getResponse(), readyResponse.v2()); } } catch (IllegalStateException e) { @@ -93,11 +89,11 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann @Override public void close(ChannelHandlerContext ctx, ChannelPromise promise) { - List, ChannelPromise>> inflightResponses = aggregator.removeAllInflightResponses(); + List> inflightResponses = aggregator.removeAllInflightResponses(); if (inflightResponses.isEmpty() == false) { ClosedChannelException closedChannelException = new ClosedChannelException(); - for (Tuple, ChannelPromise> inflightResponse : inflightResponses) { + for (Tuple inflightResponse : inflightResponses) { try { inflightResponse.v2().setFailure(closedChannelException); } catch (RuntimeException e) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java new file mode 100644 index 0000000000000..a917c171e2e99 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.netty4.pipelining; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.http.HttpPipelinedMessage; + +public class Netty4HttpResponse extends HttpPipelinedMessage { + + private final FullHttpResponse response; + + public Netty4HttpResponse(int sequence, FullHttpResponse response) { + super(sequence); + this.response = response; + } + + public FullHttpResponse getResponse() { + return response; + } +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java index c889618516a87..54c6dc20d6e63 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ -57,11 +57,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; +import org.elasticsearch.http.netty4.pipelining.Netty4HttpResponse; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestResponse; @@ -204,7 +203,6 @@ public void testThatAnyOriginWorks() { assertThat(response.headers().get(HttpHeaderNames.ACCESS_CONTROL_ALLOW_CREDENTIALS), nullValue()); } - @SuppressWarnings("unchecked") public void testHeadersSet() { Settings settings = Settings.builder().build(); try (Netty4HttpServerTransport httpServerTransport = @@ -229,7 +227,7 @@ public void testHeadersSet() { // inspect what was written List writtenObjects = writeCapturingChannel.getWrittenObjects(); assertThat(writtenObjects.size(), is(1)); - HttpResponse response = ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); + HttpResponse response = ((Netty4HttpResponse) writtenObjects.get(0)).getResponse(); assertThat(response.headers().get("non-existent-header"), nullValue()); assertThat(response.headers().get(customHeader), equalTo(customHeaderValue)); assertThat(response.headers().get(HttpHeaderNames.CONTENT_LENGTH), equalTo(Integer.toString(resp.content().length()))); @@ -323,7 +321,6 @@ private FullHttpResponse executeRequest(final Settings settings, final String ho return executeRequest(settings, null, host); } - @SuppressWarnings("unchecked") private FullHttpResponse executeRequest(final Settings settings, final String originValue, final String host) { // construct request and send it over the transport layer try (Netty4HttpServerTransport httpServerTransport = @@ -347,7 +344,7 @@ private FullHttpResponse executeRequest(final Settings settings, final String or // get the response List writtenObjects = writeCapturingChannel.getWrittenObjects(); assertThat(writtenObjects.size(), is(1)); - return ((HttpPipelinedResponse) writtenObjects.get(0)).getResponse(); + return ((Netty4HttpResponse) writtenObjects.get(0)).getResponse(); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java 
b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index 7652dfe38faab..ba5caec6de7bd 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -39,9 +39,9 @@ import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.NullDispatcher; +import org.elasticsearch.http.netty4.pipelining.Netty4HttpResponse; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -204,7 +204,7 @@ public void run() { } final ChannelPromise promise = ctx.newPromise(); - ctx.writeAndFlush(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse), promise); + ctx.writeAndFlush(new Netty4HttpResponse(pipelinedRequest.getSequence(), httpResponse), promise); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java index 858f42334226a..ebb91d9663ed5 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4PipeliningIT.java @@ -21,8 +21,6 @@ import io.netty.handler.codec.http.FullHttpResponse; import org.elasticsearch.ESNetty4IntegTestCase; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -42,14 +40,6 @@ protected boolean addMockHttpTransport() { return false; // enable http } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("http.pipelining", true) - .build(); - } - public void testThatNettyHttpServerSupportsPipelining() throws Exception { String[] requests = new String[]{"/", "/_nodes/stats", "/", "/_cluster/state", "/"}; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java index aea8baffeb9f7..67c4a78bbcf19 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java @@ -38,7 +38,6 @@ import io.netty.handler.codec.http.QueryStringDecoder; import org.elasticsearch.common.Randomness; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -230,7 +229,7 @@ public void testPipeliningRequestsAreReleased() throws InterruptedException { ChannelPromise promise = embeddedChannel.newPromise(); promises.add(promise); int sequence = 
requests.get(i).getSequence(); - HttpPipelinedResponse resp = new HttpPipelinedResponse<>(sequence, httpResponse); + Netty4HttpResponse resp = new Netty4HttpResponse(sequence, httpResponse); embeddedChannel.writeAndFlush(resp, promise); } @@ -295,7 +294,7 @@ protected void channelRead0(final ChannelHandlerContext ctx, HttpPipelinedReques waitingLatch.await(1000, TimeUnit.SECONDS); final ChannelPromise promise = ctx.newPromise(); eventLoopService.submit(() -> { - ctx.write(new HttpPipelinedResponse<>(pipelinedRequest.getSequence(), httpResponse), promise); + ctx.write(new Netty4HttpResponse(pipelinedRequest.getSequence(), httpResponse), promise); finishingLatch.countDown(); }); } catch (InterruptedException e) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java index f0407d12687e3..5391dd2847fd8 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java @@ -25,7 +25,6 @@ import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.DefaultHttpHeaders; import io.netty.handler.codec.http.FullHttpRequest; -import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.HttpContentCompressor; import io.netty.handler.codec.http.HttpContentDecompressor; import io.netty.handler.codec.http.HttpHeaders; @@ -36,12 +35,11 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.nio.pipelining.NioHttpPipeliningHandler; import org.elasticsearch.nio.FlushOperation; import org.elasticsearch.nio.InboundChannelBuffer; -import org.elasticsearch.nio.ReadWriteHandler; import org.elasticsearch.nio.NioSocketChannel; +import org.elasticsearch.nio.ReadWriteHandler; import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.nio.WriteOperation; import org.elasticsearch.rest.RestRequest; @@ -98,11 +96,10 @@ public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException { } @Override - @SuppressWarnings("unchecked") public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer listener) { - assert message instanceof HttpPipelinedResponse : "This channel only supports messages that are of type: " - + HttpPipelinedResponse.class + ". Found type: " + message.getClass() + "."; - return new HttpWriteOperation(context, (HttpPipelinedResponse) message, listener); + assert message instanceof NioHttpResponse : "This channel only supports messages that are of type: " + + NioHttpResponse.class + ". 
Found type: " + message.getClass() + "."; + return new HttpWriteOperation(context, (NioHttpResponse) message, listener); } @Override diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java index 485710165f421..8ddce7a5b73b5 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpWriteOperation.java @@ -19,8 +19,6 @@ package org.elasticsearch.http.nio; -import io.netty.handler.codec.http.FullHttpResponse; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.nio.WriteOperation; @@ -29,11 +27,10 @@ public class HttpWriteOperation implements WriteOperation { private final SocketChannelContext channelContext; - private final HttpPipelinedResponse response; + private final NioHttpResponse response; private final BiConsumer listener; - HttpWriteOperation(SocketChannelContext channelContext, HttpPipelinedResponse response, - BiConsumer listener) { + HttpWriteOperation(SocketChannelContext channelContext, NioHttpResponse response, BiConsumer listener) { this.channelContext = channelContext; this.response = response; this.listener = listener; @@ -50,7 +47,7 @@ public SocketChannelContext getChannel() { } @Override - public HttpPipelinedResponse getObject() { + public NioHttpResponse getObject() { return response; } } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java index 75cb7d2154c1d..97eba20a16f16 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.RestResponse; @@ -122,7 +121,7 @@ public void sendResponse(RestResponse response) { } BiConsumer listener = (aVoid, throwable) -> Releasables.close(toClose); - nioChannel.getContext().sendMessage(new HttpPipelinedResponse<>(sequence, resp), listener); + nioChannel.getContext().sendMessage(new NioHttpResponse(sequence, resp), listener); success = true; } finally { if (success == false) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpResponse.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpResponse.java new file mode 100644 index 0000000000000..4b634994b4557 --- /dev/null +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpResponse.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.nio; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.http.HttpPipelinedMessage; + +public class NioHttpResponse extends HttpPipelinedMessage { + + private final FullHttpResponse response; + + public NioHttpResponse(int sequence, FullHttpResponse response) { + super(sequence); + this.response = response; + } + + public FullHttpResponse getResponse() { + return response; + } +} diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java index 9b3210dd523c2..34ab34a09a8de 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java @@ -22,14 +22,13 @@ import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; -import io.netty.handler.codec.http.FullHttpResponse; import io.netty.handler.codec.http.LastHttpContent; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.http.HttpPipeliningAggregator; import org.elasticsearch.http.nio.NettyListener; +import org.elasticsearch.http.nio.NioHttpResponse; import java.nio.channels.ClosedChannelException; import java.util.List; @@ -40,7 +39,7 @@ public class NioHttpPipeliningHandler extends ChannelDuplexHandler { private final Logger logger; - private final HttpPipeliningAggregator aggregator; + private final HttpPipeliningAggregator aggregator; /** * Construct a new pipelining handler; this handler should be used downstream of HTTP decoding/aggregation. 
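Note on the pipelining handlers above: the duplex handler itself stays thin and delegates response ordering to an HttpPipeliningAggregator keyed by per-request sequence numbers. The following is a minimal standalone sketch of that ordering idea, assuming nothing beyond the JDK; the class, field, and method names are illustrative only and are not the Elasticsearch API.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.PriorityQueue;

    // Illustrative value type: a response tagged with the sequence of the request it answers.
    final class SequencedResponse {
        final int sequence;
        final String payload;

        SequencedResponse(int sequence, String payload) {
            this.sequence = sequence;
            this.payload = payload;
        }
    }

    // Illustrative aggregator: requests take an increasing read sequence, and a response is
    // held back until every response with a lower sequence has already been released.
    final class PipeliningOrderSketch {
        private int readSequence;   // next sequence to hand out when a request is read
        private int writeSequence;  // next sequence allowed to be written to the channel
        private final PriorityQueue<SequencedResponse> held =
            new PriorityQueue<>(Comparator.comparingInt((SequencedResponse r) -> r.sequence));

        int nextReadSequence() {
            return readSequence++;
        }

        // Returns the responses that may be flushed now, in original request order.
        List<SequencedResponse> write(SequencedResponse response) {
            held.add(response);
            List<SequencedResponse> ready = new ArrayList<>();
            while (held.isEmpty() == false && held.peek().sequence == writeSequence) {
                ready.add(held.poll());
                writeSequence++;
            }
            return ready;
        }
    }

In handler terms, channelRead would tag each inbound request via nextReadSequence(), and write would flush whatever the sketch returns; the hunks below apply the same pattern against the real aggregator.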
@@ -65,16 +64,15 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw } @Override - @SuppressWarnings("unchecked") public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) { - if (msg instanceof HttpPipelinedResponse) { - HttpPipelinedResponse response = (HttpPipelinedResponse) msg; + if (msg instanceof NioHttpResponse) { + NioHttpResponse response = (NioHttpResponse) msg; boolean success = false; try { NettyListener listener = NettyListener.fromChannelPromise(promise); - List, NettyListener>> readyResponses = aggregator.write(response, listener); + List> readyResponses = aggregator.write(response, listener); success = true; - for (Tuple, NettyListener> responseToWrite : readyResponses) { + for (Tuple responseToWrite : readyResponses) { ctx.write(responseToWrite.v1().getResponse(), responseToWrite.v2()); } } catch (IllegalStateException e) { @@ -91,11 +89,11 @@ public void write(final ChannelHandlerContext ctx, final Object msg, final Chann @Override public void close(ChannelHandlerContext ctx, ChannelPromise promise) { - List, NettyListener>> inflightResponses = aggregator.removeAllInflightResponses(); + List> inflightResponses = aggregator.removeAllInflightResponses(); if (inflightResponses.isEmpty() == false) { ClosedChannelException closedChannelException = new ClosedChannelException(); - for (Tuple, NettyListener> inflightResponse : inflightResponses) { + for (Tuple inflightResponse : inflightResponses) { try { inflightResponse.v2().setFailure(closedChannelException); } catch (RuntimeException e) { diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java index 416f3cb791f54..cc8eeb77cc2f6 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpHandlingSettings; -import org.elasticsearch.http.HttpPipelinedResponse; import org.elasticsearch.nio.FlushOperation; import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.nio.NioSocketChannel; @@ -67,7 +66,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; public class HttpReadWriteHandlerTests extends ESTestCase { @@ -173,7 +171,7 @@ public void testEncodeHttpResponse() throws IOException { prepareHandlerForResponse(handler); FullHttpResponse fullHttpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK); - HttpPipelinedResponse pipelinedResponse = new HttpPipelinedResponse<>(0, fullHttpResponse); + NioHttpResponse pipelinedResponse = new NioHttpResponse(0, fullHttpResponse); SocketChannelContext context = mock(SocketChannelContext.class); HttpWriteOperation writeOperation = new HttpWriteOperation(context, pipelinedResponse, mock(BiConsumer.class)); diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java b/server/src/main/java/org/elasticsearch/http/HttpPipelinedMessage.java similarity index 73% rename from server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java rename to 
server/src/main/java/org/elasticsearch/http/HttpPipelinedMessage.java index cd1082f080747..7db8666e73ae3 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpPipelinedResponse.java +++ b/server/src/main/java/org/elasticsearch/http/HttpPipelinedMessage.java @@ -18,26 +18,20 @@ */ package org.elasticsearch.http; -public class HttpPipelinedResponse implements Comparable> { +public class HttpPipelinedMessage implements Comparable { private final int sequence; - private final Response response; - public HttpPipelinedResponse(int sequence, Response response) { + public HttpPipelinedMessage(int sequence) { this.sequence = sequence; - this.response = response; } public int getSequence() { return sequence; } - public Response getResponse() { - return response; - } - @Override - public int compareTo(HttpPipelinedResponse o) { + public int compareTo(HttpPipelinedMessage o) { return Integer.compare(sequence, o.sequence); } } diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java b/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java index 69deaaf072344..df8bd7ee1eb80 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java +++ b/server/src/main/java/org/elasticsearch/http/HttpPipelinedRequest.java @@ -18,20 +18,15 @@ */ package org.elasticsearch.http; -public class HttpPipelinedRequest { +public class HttpPipelinedRequest extends HttpPipelinedMessage { - private final int sequence; private final R request; - public HttpPipelinedRequest(int sequence, R request) { - this.sequence = sequence; + HttpPipelinedRequest(int sequence, R request) { + super(sequence); this.request = request; } - public int getSequence() { - return sequence; - } - public R getRequest() { return request; } diff --git a/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java b/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java index 91e44bff261f7..f38e9677979db 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java +++ b/server/src/main/java/org/elasticsearch/http/HttpPipeliningAggregator.java @@ -21,13 +21,14 @@ import org.elasticsearch.common.collect.Tuple; import java.util.ArrayList; +import java.util.Comparator; import java.util.List; import java.util.PriorityQueue; -public class HttpPipeliningAggregator { +public class HttpPipeliningAggregator { private final int maxEventsHeld; - private final PriorityQueue, Listener>> outboundHoldingQueue; + private final PriorityQueue> outboundHoldingQueue; /* * The current read and write sequence numbers. Read sequence numbers are attached to requests in the order they are read from the * channel, and then transferred to responses. 
A response is not written to the channel context until its sequence number matches the @@ -38,23 +39,23 @@ public class HttpPipeliningAggregator { public HttpPipeliningAggregator(int maxEventsHeld) { this.maxEventsHeld = maxEventsHeld; - this.outboundHoldingQueue = new PriorityQueue<>(1); + this.outboundHoldingQueue = new PriorityQueue<>(1, Comparator.comparing(Tuple::v1)); } public HttpPipelinedRequest read(final Request request) { return new HttpPipelinedRequest<>(readSequence++, request); } - public List, Listener>> write(final HttpPipelinedResponse response, Listener listener) { + public List> write(final Response response, Listener listener) { if (outboundHoldingQueue.size() < maxEventsHeld) { - ArrayList, Listener>> readyResponses = new ArrayList<>(); + ArrayList> readyResponses = new ArrayList<>(); outboundHoldingQueue.add(new Tuple<>(response, listener)); while (!outboundHoldingQueue.isEmpty()) { /* * Since the response with the lowest sequence number is the top of the priority queue, we know if its sequence * number does not match the current write sequence number then we have not processed all preceding responses yet. */ - final Tuple, Listener> top = outboundHoldingQueue.peek(); + final Tuple top = outboundHoldingQueue.peek(); if (top.v1().getSequence() != writeSequence) { break; @@ -72,8 +73,8 @@ public List, Listener>> write(final HttpPi } } - public List, Listener>> removeAllInflightResponses() { - ArrayList, Listener>> responses = new ArrayList<>(outboundHoldingQueue); + public List> removeAllInflightResponses() { + ArrayList> responses = new ArrayList<>(outboundHoldingQueue); outboundHoldingQueue.clear(); return responses; } From 15eac7839c62056d69d1c1273233ba893a290a99 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 May 2018 10:47:49 -0600 Subject: [PATCH 06/11] Remove references to setting --- docs/reference/modules/http.asciidoc | 2 -- .../http/netty4/Netty4HttpServerPipeliningTests.java | 1 - 2 files changed, 3 deletions(-) diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc index 7f29a9db7f605..dab8e8136893e 100644 --- a/docs/reference/modules/http.asciidoc +++ b/docs/reference/modules/http.asciidoc @@ -96,8 +96,6 @@ and stack traces in response output. Note: When set to `false` and the `error_tr parameter is specified, an error will be returned; when `error_trace` is not specified, a simple message will be returned. Defaults to `true` -|`http.pipelining` |Enable or disable HTTP pipelining, defaults to `true`. - |`http.pipelining.max_events` |The maximum number of events to be queued up in memory before a HTTP connection is closed, defaults to `10000`. 
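With the `http.pipelining` on/off switch removed in this series, `http.pipelining.max_events` (default `10000`, as documented above) is the remaining pipelining setting. A hedged sketch of overriding it when building node or test settings follows; the enclosing class is hypothetical and the value 20000 is an arbitrary example, not a recommendation.

    import org.elasticsearch.common.settings.Settings;

    final class PipeliningSettingsExample {
        static Settings pipeliningSettings() {
            return Settings.builder()
                .put("http.port", "0")                    // ephemeral port, as the pipelining tests in this series do
                .put("http.pipelining.max_events", 20000) // close the connection once more than 20000 events are queued
                .build();
        }
    }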
|`http.max_warning_header_count` |The maximum number of warning headers in diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index ba5caec6de7bd..de93fa7dbc3a0 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -83,7 +83,6 @@ public void shutdown() throws Exception { public void testThatHttpPipeliningWorks() throws Exception { final Settings settings = Settings.builder() - .put("http.pipelining", true) .put("http.port", "0") .build(); try (HttpServerTransport httpServerTransport = new CustomNettyHttpServerTransport(settings)) { From 7adb2b3abf660ff64041595a03039240dcc3a25e Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 May 2018 11:12:08 -0600 Subject: [PATCH 07/11] tests --- .../HttpPipeliningHandler.java | 2 +- .../http/netty4/Netty4HttpChannel.java | 1 - .../{pipelining => }/Netty4HttpResponse.java | 2 +- .../netty4/Netty4HttpServerTransport.java | 1 - .../http/netty4/Netty4HttpChannelTests.java | 1 - .../Netty4HttpPipeliningHandlerTests.java | 2 +- .../Netty4HttpServerPipeliningTests.java | 1 - .../http/nio/HttpReadWriteHandler.java | 1 - .../NioHttpPipeliningHandler.java | 2 +- .../org/elasticsearch/NioIntegTestCase.java | 5 +- .../nio/NioHttpPipeliningHandlerTests.java | 305 ++++++++++++++++++ .../http/nio/NioPipeliningIT.java | 68 ++++ 12 files changed, 381 insertions(+), 10 deletions(-) rename modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/{pipelining => }/HttpPipeliningHandler.java (98%) rename modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/{pipelining => }/Netty4HttpResponse.java (96%) rename modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/{pipelining => }/Netty4HttpPipeliningHandlerTests.java (99%) rename plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/{pipelining => }/NioHttpPipeliningHandler.java (98%) create mode 100644 plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java create mode 100644 plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioPipeliningIT.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/HttpPipeliningHandler.java similarity index 98% rename from modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java rename to modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/HttpPipeliningHandler.java index 8d9782d50b4a8..94228bd9da3da 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/HttpPipeliningHandler.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.http.netty4.pipelining; +package org.elasticsearch.http.netty4; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java index 4eee731d2aca9..cb31d44454452 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; -import org.elasticsearch.http.netty4.pipelining.Netty4HttpResponse; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java similarity index 96% rename from modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java rename to modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java index a917c171e2e99..779c9125a2e42 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpResponse.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpResponse.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.http.netty4.pipelining; +package org.elasticsearch.http.netty4; import io.netty.handler.codec.http.FullHttpResponse; import org.elasticsearch.http.HttpPipelinedMessage; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 9b26527dd489c..0b9b799f78e67 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -62,7 +62,6 @@ import org.elasticsearch.http.netty4.cors.Netty4CorsConfig; import org.elasticsearch.http.netty4.cors.Netty4CorsConfigBuilder; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; -import org.elasticsearch.http.netty4.pipelining.HttpPipeliningHandler; import org.elasticsearch.rest.RestUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty4.Netty4OpenChannelsHandler; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java index 54c6dc20d6e63..7c5b35a322996 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpChannelTests.java @@ -60,7 +60,6 @@ import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.http.NullDispatcher; import org.elasticsearch.http.netty4.cors.Netty4CorsHandler; -import 
org.elasticsearch.http.netty4.pipelining.Netty4HttpResponse; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestResponse; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java similarity index 99% rename from modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java rename to modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 67c4a78bbcf19..7ea1a1034294d 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/pipelining/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.http.netty4.pipelining; +package org.elasticsearch.http.netty4; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufUtil; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java index de93fa7dbc3a0..f2b28b909187b 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.http.HttpPipelinedRequest; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.NullDispatcher; -import org.elasticsearch.http.netty4.pipelining.Netty4HttpResponse; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java index 5391dd2847fd8..913ba2fe956ea 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.HttpPipelinedRequest; -import org.elasticsearch.http.nio.pipelining.NioHttpPipeliningHandler; import org.elasticsearch.nio.FlushOperation; import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.nio.NioSocketChannel; diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java similarity index 98% rename from plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java rename to plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java index 34ab34a09a8de..a4fa413f4291d 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/pipelining/NioHttpPipeliningHandler.java +++ 
b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.http.nio.pipelining; +package org.elasticsearch.http.nio; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/NioIntegTestCase.java b/plugins/transport-nio/src/test/java/org/elasticsearch/NioIntegTestCase.java index e0c8bacca1d85..703f7acbf8257 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/NioIntegTestCase.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/NioIntegTestCase.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.http.nio.NioHttpServerTransport; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.nio.NioTransport; @@ -43,11 +44,13 @@ protected boolean addMockTransportService() { @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); - // randomize netty settings + // randomize nio settings if (randomBoolean()) { builder.put(NioTransport.NIO_WORKER_COUNT.getKey(), random().nextInt(3) + 1); + builder.put(NioHttpServerTransport.NIO_HTTP_WORKER_COUNT.getKey(), random().nextInt(3) + 1); } builder.put(NetworkModule.TRANSPORT_TYPE_KEY, NioTransportPlugin.NIO_TRANSPORT_NAME); + builder.put(NetworkModule.HTTP_TYPE_KEY, NioTransportPlugin.NIO_HTTP_TRANSPORT_NAME); return builder.build(); } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java new file mode 100644 index 0000000000000..473cd68ef3b57 --- /dev/null +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java @@ -0,0 +1,305 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.http.nio; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPromise; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.DefaultHttpRequest; +import io.netty.handler.codec.http.FullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; +import io.netty.handler.codec.http.QueryStringDecoder; +import org.elasticsearch.common.Randomness; +import org.elasticsearch.http.HttpPipelinedRequest; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; + +import java.nio.channels.ClosedChannelException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedTransferQueue; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; +import static io.netty.handler.codec.http.HttpResponseStatus.OK; +import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; +import static org.hamcrest.core.Is.is; + +public class NioHttpPipeliningHandlerTests extends ESTestCase { + + private final ExecutorService handlerService = Executors.newFixedThreadPool(randomIntBetween(4, 8)); + private final ExecutorService eventLoopService = Executors.newFixedThreadPool(1); + private final Map waitingRequests = new ConcurrentHashMap<>(); + private final Map finishingRequests = new ConcurrentHashMap<>(); + + @After + public void tearDown() throws Exception { + waitingRequests.keySet().forEach(this::finishRequest); + shutdownExecutorService(); + super.tearDown(); + } + + private CountDownLatch finishRequest(String url) { + waitingRequests.get(url).countDown(); + return finishingRequests.get(url); + } + + private void shutdownExecutorService() throws InterruptedException { + if (!handlerService.isShutdown()) { + handlerService.shutdown(); + handlerService.awaitTermination(10, TimeUnit.SECONDS); + } + if (!eventLoopService.isShutdown()) { + eventLoopService.shutdown(); + eventLoopService.awaitTermination(10, TimeUnit.SECONDS); + } + } + + public void testThatPipeliningWorksWithFastSerializedRequests() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new NioHttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler()); + + for (int i = 0; i < numberOfRequests; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); + } + + final List latches = new ArrayList<>(); + for (final String url : waitingRequests.keySet()) { + latches.add(finishRequest(url)); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + embeddedChannel.flush(); + + for (int i = 0; i < 
numberOfRequests; i++) { + assertReadHttpMessageHasContent(embeddedChannel, String.valueOf(i)); + } + + assertTrue(embeddedChannel.isOpen()); + } + + public void testThatPipeliningWorksWhenSlowRequestsInDifferentOrder() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new NioHttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler()); + + for (int i = 0; i < numberOfRequests; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + String.valueOf(i))); + } + + // random order execution + final List urls = new ArrayList<>(waitingRequests.keySet()); + Randomness.shuffle(urls); + final List latches = new ArrayList<>(); + for (final String url : urls) { + latches.add(finishRequest(url)); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + embeddedChannel.flush(); + + for (int i = 0; i < numberOfRequests; i++) { + assertReadHttpMessageHasContent(embeddedChannel, String.valueOf(i)); + } + + assertTrue(embeddedChannel.isOpen()); + } + + public void testThatPipeliningWorksWithChunkedRequests() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = + new EmbeddedChannel( + new AggregateUrisAndHeadersHandler(), + new NioHttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler()); + + for (int i = 0; i < numberOfRequests; i++) { + final DefaultHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/" + i); + embeddedChannel.writeInbound(request); + embeddedChannel.writeInbound(LastHttpContent.EMPTY_LAST_CONTENT); + } + + final List latches = new ArrayList<>(); + for (int i = numberOfRequests - 1; i >= 0; i--) { + latches.add(finishRequest(Integer.toString(i))); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + embeddedChannel.flush(); + + for (int i = 0; i < numberOfRequests; i++) { + assertReadHttpMessageHasContent(embeddedChannel, Integer.toString(i)); + } + + assertTrue(embeddedChannel.isOpen()); + } + + public void testThatPipeliningClosesConnectionWithTooManyEvents() throws InterruptedException { + final int numberOfRequests = randomIntBetween(2, 128); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new NioHttpPipeliningHandler(logger, numberOfRequests), + new WorkEmulatorHandler()); + + for (int i = 0; i < 1 + numberOfRequests + 1; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + Integer.toString(i))); + } + + final List latches = new ArrayList<>(); + final List requests = IntStream.range(1, numberOfRequests + 1).boxed().collect(Collectors.toList()); + Randomness.shuffle(requests); + + for (final Integer request : requests) { + latches.add(finishRequest(request.toString())); + } + + for (final CountDownLatch latch : latches) { + latch.await(); + } + + finishRequest(Integer.toString(numberOfRequests + 1)).await(); + + embeddedChannel.flush(); + + assertFalse(embeddedChannel.isOpen()); + } + + public void testPipeliningRequestsAreReleased() throws InterruptedException { + final int numberOfRequests = 10; + final EmbeddedChannel embeddedChannel = + new EmbeddedChannel(new NioHttpPipeliningHandler(logger, numberOfRequests + 1)); + + for (int i = 0; i < numberOfRequests; i++) { + embeddedChannel.writeInbound(createHttpRequest("/" + i)); + } + + HttpPipelinedRequest inbound; + ArrayList> requests = new ArrayList<>(); + while ((inbound = embeddedChannel.readInbound()) != null) { 
+ requests.add(inbound); + } + + ArrayList promises = new ArrayList<>(); + for (int i = 1; i < requests.size(); ++i) { + final FullHttpResponse httpResponse = new DefaultFullHttpResponse(HTTP_1_1, OK); + ChannelPromise promise = embeddedChannel.newPromise(); + promises.add(promise); + int sequence = requests.get(i).getSequence(); + NioHttpResponse resp = new NioHttpResponse(sequence, httpResponse); + embeddedChannel.writeAndFlush(resp, promise); + } + + for (ChannelPromise promise : promises) { + assertFalse(promise.isDone()); + } + embeddedChannel.close().syncUninterruptibly(); + for (ChannelPromise promise : promises) { + assertTrue(promise.isDone()); + assertTrue(promise.cause() instanceof ClosedChannelException); + } + } + + private void assertReadHttpMessageHasContent(EmbeddedChannel embeddedChannel, String expectedContent) { + FullHttpResponse response = (FullHttpResponse) embeddedChannel.outboundMessages().poll(); + assertNotNull("Expected response to exist, maybe you did not wait long enough?", response); + assertNotNull("Expected response to have content " + expectedContent, response.content()); + String data = new String(ByteBufUtil.getBytes(response.content()), StandardCharsets.UTF_8); + assertThat(data, is(expectedContent)); + } + + private FullHttpRequest createHttpRequest(String uri) { + return new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uri); + } + + private static class AggregateUrisAndHeadersHandler extends SimpleChannelInboundHandler { + + static final Queue QUEUE_URI = new LinkedTransferQueue<>(); + + @Override + protected void channelRead0(ChannelHandlerContext ctx, HttpRequest request) throws Exception { + QUEUE_URI.add(request.uri()); + } + + } + + private class WorkEmulatorHandler extends SimpleChannelInboundHandler> { + + @Override + protected void channelRead0(final ChannelHandlerContext ctx, HttpPipelinedRequest pipelinedRequest) { + LastHttpContent request = pipelinedRequest.getRequest(); + final QueryStringDecoder decoder; + if (request instanceof FullHttpRequest) { + decoder = new QueryStringDecoder(((FullHttpRequest)request).uri()); + } else { + decoder = new QueryStringDecoder(AggregateUrisAndHeadersHandler.QUEUE_URI.poll()); + } + + final String uri = decoder.path().replace("/", ""); + final ByteBuf content = Unpooled.copiedBuffer(uri, StandardCharsets.UTF_8); + final DefaultFullHttpResponse httpResponse = new DefaultFullHttpResponse(HTTP_1_1, OK, content); + httpResponse.headers().add(CONTENT_LENGTH, content.readableBytes()); + + final CountDownLatch waitingLatch = new CountDownLatch(1); + waitingRequests.put(uri, waitingLatch); + final CountDownLatch finishingLatch = new CountDownLatch(1); + finishingRequests.put(uri, finishingLatch); + + handlerService.submit(() -> { + try { + waitingLatch.await(1000, TimeUnit.SECONDS); + final ChannelPromise promise = ctx.newPromise(); + eventLoopService.submit(() -> { + ctx.write(new NioHttpResponse(pipelinedRequest.getSequence(), httpResponse), promise); + finishingLatch.countDown(); + }); + } catch (InterruptedException e) { + fail(e.toString()); + } + }); + } + } +} diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioPipeliningIT.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioPipeliningIT.java new file mode 100644 index 0000000000000..074aafd6eab4b --- /dev/null +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioPipeliningIT.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.nio; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.NioIntegTestCase; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; + +import java.util.Collection; +import java.util.Locale; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) +public class NioPipeliningIT extends NioIntegTestCase { + + @Override + protected boolean addMockHttpTransport() { + return false; // enable http + } + + public void testThatNioHttpServerSupportsPipelining() throws Exception { + String[] requests = new String[]{"/", "/_nodes/stats", "/", "/_cluster/state", "/"}; + + HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); + TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); + TransportAddress transportAddress = randomFrom(boundAddresses); + + try (Netty4HttpClient nettyHttpClient = new Netty4HttpClient()) { + Collection responses = nettyHttpClient.get(transportAddress.address(), requests); + assertThat(responses, hasSize(5)); + + Collection opaqueIds = Netty4HttpClient.returnOpaqueIds(responses); + assertOpaqueIdsInOrder(opaqueIds); + } + } + + private void assertOpaqueIdsInOrder(Collection opaqueIds) { + // check if opaque ids are monotonically increasing + int i = 0; + String msg = String.format(Locale.ROOT, "Expected list of opaque ids to be monotonically increasing, got [%s]", opaqueIds); + for (String opaqueId : opaqueIds) { + assertThat(msg, opaqueId, is(String.valueOf(i++))); + } + } + +} From a884eaa9fba04942d769c382ce61c6e7ad9fef76 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 17 May 2018 11:16:31 -0600 Subject: [PATCH 08/11] Rename handler --- ...ngHandler.java => Netty4HttpPipeliningHandler.java} | 4 ++-- .../http/netty4/Netty4HttpServerTransport.java | 2 +- .../http/netty4/Netty4HttpPipeliningHandlerTests.java | 10 +++++----- 3 files changed, 8 insertions(+), 8 deletions(-) rename modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/{HttpPipeliningHandler.java => Netty4HttpPipeliningHandler.java} (96%) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java similarity index 96% rename from modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/HttpPipeliningHandler.java rename to 
modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java index 94228bd9da3da..edd4362f003a5 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java @@ -36,7 +36,7 @@ /** * Implements HTTP pipelining ordering, ensuring that responses are completely served in the same order as their corresponding requests. */ -public class HttpPipeliningHandler extends ChannelDuplexHandler { +public class Netty4HttpPipeliningHandler extends ChannelDuplexHandler { private final Logger logger; private final HttpPipeliningAggregator aggregator; @@ -48,7 +48,7 @@ public class HttpPipeliningHandler extends ChannelDuplexHandler { * @param maxEventsHeld the maximum number of channel events that will be retained prior to aborting the channel connection; this is * required as events cannot queue up indefinitely */ - public HttpPipeliningHandler(Logger logger, final int maxEventsHeld) { + public Netty4HttpPipeliningHandler(Logger logger, final int maxEventsHeld) { this.logger = logger; this.aggregator = new HttpPipeliningAggregator<>(maxEventsHeld); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 0b9b799f78e67..45e889797bde4 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -448,7 +448,7 @@ protected void initChannel(Channel ch) throws Exception { if (SETTING_CORS_ENABLED.get(transport.settings())) { ch.pipeline().addLast("cors", new Netty4CorsHandler(transport.getCorsConfig())); } - ch.pipeline().addLast("pipelining", new HttpPipeliningHandler(transport.logger, transport.pipeliningMaxEvents)); + ch.pipeline().addLast("pipelining", new Netty4HttpPipeliningHandler(transport.logger, transport.pipeliningMaxEvents)); ch.pipeline().addLast("handler", requestHandler); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 7ea1a1034294d..21151304424c1 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -93,7 +93,7 @@ private void shutdownExecutorService() throws InterruptedException { public void testThatPipeliningWorksWithFastSerializedRequests() throws InterruptedException { final int numberOfRequests = randomIntBetween(2, 128); - final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new HttpPipeliningHandler(logger, numberOfRequests), + final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests), new WorkEmulatorHandler()); for (int i = 0; i < numberOfRequests; i++) { @@ -120,7 +120,7 @@ public void testThatPipeliningWorksWithFastSerializedRequests() throws Interrupt public void testThatPipeliningWorksWhenSlowRequestsInDifferentOrder() throws InterruptedException { final int numberOfRequests = randomIntBetween(2, 128); - final EmbeddedChannel embeddedChannel 
= new EmbeddedChannel(new HttpPipeliningHandler(logger, numberOfRequests),
+        final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests),
             new WorkEmulatorHandler());
 
         for (int i = 0; i < numberOfRequests; i++) {
@@ -153,7 +153,7 @@ public void testThatPipeliningWorksWithChunkedRequests() throws InterruptedExcep
         final EmbeddedChannel embeddedChannel = new EmbeddedChannel(
             new AggregateUrisAndHeadersHandler(),
-            new HttpPipeliningHandler(logger, numberOfRequests),
+            new Netty4HttpPipeliningHandler(logger, numberOfRequests),
             new WorkEmulatorHandler());
 
         for (int i = 0; i < numberOfRequests; i++) {
@@ -182,7 +182,7 @@ public void testThatPipeliningWorksWithChunkedRequests() throws InterruptedExcep
     public void testThatPipeliningClosesConnectionWithTooManyEvents() throws InterruptedException {
         final int numberOfRequests = randomIntBetween(2, 128);
-        final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new HttpPipeliningHandler(logger, numberOfRequests),
+        final EmbeddedChannel embeddedChannel = new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests),
             new WorkEmulatorHandler());
 
         for (int i = 0; i < 1 + numberOfRequests + 1; i++) {
@@ -211,7 +211,7 @@ public void testThatPipeliningClosesConnectionWithTooManyEvents() throws Interru
     public void testPipeliningRequestsAreReleased() throws InterruptedException {
         final int numberOfRequests = 10;
         final EmbeddedChannel embeddedChannel =
-            new EmbeddedChannel(new HttpPipeliningHandler(logger, numberOfRequests + 1));
+            new EmbeddedChannel(new Netty4HttpPipeliningHandler(logger, numberOfRequests + 1));
 
         for (int i = 0; i < numberOfRequests; i++) {
             embeddedChannel.writeInbound(createHttpRequest("/" + i));

From 7bad406efbc55b75ff059c0b28ff580a94ade046 Mon Sep 17 00:00:00 2001
From: Tim Brooks
Date: Thu, 17 May 2018 11:23:45 -0600
Subject: [PATCH 09/11] Add assertions and release request

---
 .../netty4/Netty4HttpPipeliningHandler.java   |  35 +++--
 .../http/netty4/Netty4HttpRequestHandler.java |   4 -
 .../http/nio/HttpReadWriteHandler.java        | 131 +++++++++---------
 .../http/nio/NioHttpPipeliningHandler.java    |  35 +++--
 4 files changed, 100 insertions(+), 105 deletions(-)

diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java
index edd4362f003a5..01c9fc9e665cb 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java
@@ -54,7 +54,7 @@ public Netty4HttpPipeliningHandler(Logger logger, final int maxEventsHeld) {
     }
 
     @Override
-    public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
+    public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
         if (msg instanceof LastHttpContent) {
             HttpPipelinedRequest pipelinedRequest = aggregator.read(((LastHttpContent) msg).retain());
             ctx.fireChannelRead(pipelinedRequest);
@@ -65,25 +65,22 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw
 
     @Override
     public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception {
-        if (msg instanceof Netty4HttpResponse) {
-            Netty4HttpResponse response = (Netty4HttpResponse) msg;
-            boolean success = false;
-            try {
-                List<Tuple<Netty4HttpResponse, ChannelPromise>> readyResponses = aggregator.write(response, promise);
-                success = true;
-                for (Tuple<Netty4HttpResponse, ChannelPromise> readyResponse : readyResponses) {
-                    ctx.write(readyResponse.v1().getResponse(), readyResponse.v2());
-                }
-            } catch (IllegalStateException e) {
-                ctx.channel().close();
-                Netty4Utils.closeChannels(Collections.singletonList(ctx.channel()));
-            } finally {
-                if (success == false) {
-                    promise.setFailure(new ClosedChannelException());
-                }
+        assert msg instanceof Netty4HttpResponse : "Message must be type: " + Netty4HttpResponse.class;
+        Netty4HttpResponse response = (Netty4HttpResponse) msg;
+        boolean success = false;
+        try {
+            List<Tuple<Netty4HttpResponse, ChannelPromise>> readyResponses = aggregator.write(response, promise);
+            success = true;
+            for (Tuple<Netty4HttpResponse, ChannelPromise> readyResponse : readyResponses) {
+                ctx.write(readyResponse.v1().getResponse(), readyResponse.v2());
+            }
+        } catch (IllegalStateException e) {
+            ctx.channel().close();
+            Netty4Utils.closeChannels(Collections.singletonList(ctx.channel()));
+        } finally {
+            if (success == false) {
+                promise.setFailure(new ClosedChannelException());
             }
-        } else {
-            ctx.write(msg, promise);
         }
     }
 
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java
index 643ce3c0be67f..c3a010226a408 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java
@@ -130,10 +130,6 @@ protected void channelRead0(ChannelHandlerContext ctx,
         HttpPipelinedRequest pipelinedRequest = (HttpPipelinedRequest) msg;
         FullHttpRequest request = pipelinedRequest.getRequest();
 
-        final FullHttpRequest copiedRequest =
-            new DefaultFullHttpRequest(
-                request.protocolVersion(),
-                request.method(),
-                request.uri(),
-                Unpooled.copiedBuffer(request.content()),
-                request.headers(),
-                request.trailingHeaders());
-
-        Exception badRequestCause = null;
-
-        /*
-         * We want to create a REST request from the incoming request from Netty. However, creating this request could fail if there
-         * are incorrectly encoded parameters, or the Content-Type header is invalid. If one of these specific failures occurs, we
-         * attempt to create a REST request again without the input that caused the exception (e.g., we remove the Content-Type header,
-         * or skip decoding the parameters). Once we have a request in hand, we then dispatch the request as a bad request with the
-         * underlying exception that caused us to treat the request as bad.
-         */
-        final NioHttpRequest httpRequest;
-        {
-            NioHttpRequest innerHttpRequest;
-            try {
-                innerHttpRequest = new NioHttpRequest(xContentRegistry, copiedRequest);
-            } catch (final RestRequest.ContentTypeHeaderException e) {
-                badRequestCause = e;
-                innerHttpRequest = requestWithoutContentTypeHeader(copiedRequest, badRequestCause);
-            } catch (final RestRequest.BadParameterException e) {
-                badRequestCause = e;
-                innerHttpRequest = requestWithoutParameters(copiedRequest);
+        try {
+            final FullHttpRequest copiedRequest =
+                new DefaultFullHttpRequest(
+                    request.protocolVersion(),
+                    request.method(),
+                    request.uri(),
+                    Unpooled.copiedBuffer(request.content()),
+                    request.headers(),
+                    request.trailingHeaders());
+
+            Exception badRequestCause = null;
+
+            /*
+             * We want to create a REST request from the incoming request from Netty. However, creating this request could fail if there
+             * are incorrectly encoded parameters, or the Content-Type header is invalid. If one of these specific failures occurs, we
+             * attempt to create a REST request again without the input that caused the exception (e.g., we remove the Content-Type header,
+             * or skip decoding the parameters). Once we have a request in hand, we then dispatch the request as a bad request with the
+             * underlying exception that caused us to treat the request as bad.
+             */
+            final NioHttpRequest httpRequest;
+            {
+                NioHttpRequest innerHttpRequest;
+                try {
+                    innerHttpRequest = new NioHttpRequest(xContentRegistry, copiedRequest);
+                } catch (final RestRequest.ContentTypeHeaderException e) {
+                    badRequestCause = e;
+                    innerHttpRequest = requestWithoutContentTypeHeader(copiedRequest, badRequestCause);
+                } catch (final RestRequest.BadParameterException e) {
+                    badRequestCause = e;
+                    innerHttpRequest = requestWithoutParameters(copiedRequest);
+                }
+                httpRequest = innerHttpRequest;
+            }
-            httpRequest = innerHttpRequest;
-        }
-        /*
-         * We now want to create a channel used to send the response on. However, creating this channel can fail if there are invalid
-         * parameter values for any of the filter_path, human, or pretty parameters. We detect these specific failures via an
-         * IllegalArgumentException from the channel constructor and then attempt to create a new channel that bypasses parsing of these
-         * parameter values.
-         */
-        final NioHttpChannel channel;
-        {
-            NioHttpChannel innerChannel;
-            int sequence = pipelinedRequest.getSequence();
-            try {
-                innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), httpRequest, sequence, settings, threadContext);
-            } catch (final IllegalArgumentException e) {
-                if (badRequestCause == null) {
-                    badRequestCause = e;
-                } else {
-                    badRequestCause.addSuppressed(e);
+            /*
+             * We now want to create a channel used to send the response on. However, creating this channel can fail if there are invalid
+             * parameter values for any of the filter_path, human, or pretty parameters. We detect these specific failures via an
+             * IllegalArgumentException from the channel constructor and then attempt to create a new channel that bypasses parsing of these
+             * parameter values.
+             */
+            final NioHttpChannel channel;
+            {
+                NioHttpChannel innerChannel;
+                int sequence = pipelinedRequest.getSequence();
+                try {
+                    innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), httpRequest, sequence, settings, threadContext);
+                } catch (final IllegalArgumentException e) {
+                    if (badRequestCause == null) {
+                        badRequestCause = e;
+                    } else {
+                        badRequestCause.addSuppressed(e);
+                    }
+                    final NioHttpRequest innerRequest =
+                        new NioHttpRequest(
+                            xContentRegistry,
+                            Collections.emptyMap(), // we are going to dispatch the request as a bad request, drop all parameters
+                            copiedRequest.uri(),
+                            copiedRequest);
+                    innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), innerRequest, sequence, settings, threadContext);
                 }
-                final NioHttpRequest innerRequest =
-                    new NioHttpRequest(
-                        xContentRegistry,
-                        Collections.emptyMap(), // we are going to dispatch the request as a bad request, drop all parameters
-                        copiedRequest.uri(),
-                        copiedRequest);
-                innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), innerRequest, sequence, settings, threadContext);
+                channel = innerChannel;
             }
-            channel = innerChannel;
-        }
-        if (request.decoderResult().isFailure()) {
-            transport.dispatchBadRequest(httpRequest, channel, request.decoderResult().cause());
-        } else if (badRequestCause != null) {
-            transport.dispatchBadRequest(httpRequest, channel, badRequestCause);
-        } else {
-            transport.dispatchRequest(httpRequest, channel);
+            if (request.decoderResult().isFailure()) {
+                transport.dispatchBadRequest(httpRequest, channel, request.decoderResult().cause());
+            } else if (badRequestCause != null) {
+                transport.dispatchBadRequest(httpRequest, channel, badRequestCause);
+            } else {
+                transport.dispatchRequest(httpRequest, channel);
+            }
+        } finally {
+            // As we have copied the buffer, we can release the request
+            request.release();
         }
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java
index a4fa413f4291d..57a14e7819d4e 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpPipeliningHandler.java
@@ -54,7 +54,7 @@ public NioHttpPipeliningHandler(Logger logger, final int maxEventsHeld) {
     }
 
     @Override
-    public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
+    public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
         if (msg instanceof LastHttpContent) {
             HttpPipelinedRequest pipelinedRequest = aggregator.read(((LastHttpContent) msg).retain());
             ctx.fireChannelRead(pipelinedRequest);
@@ -65,25 +65,22 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) throw
 
     @Override
     public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) {
-        if (msg instanceof NioHttpResponse) {
-            NioHttpResponse response = (NioHttpResponse) msg;
-            boolean success = false;
-            try {
-                NettyListener listener = NettyListener.fromChannelPromise(promise);
-                List<Tuple<NioHttpResponse, NettyListener>> readyResponses = aggregator.write(response, listener);
-                success = true;
-                for (Tuple<NioHttpResponse, NettyListener> responseToWrite : readyResponses) {
-                    ctx.write(responseToWrite.v1().getResponse(), responseToWrite.v2());
-                }
-            } catch (IllegalStateException e) {
-                ctx.channel().close();
-            } finally {
-                if (success == false) {
-                    promise.setFailure(new ClosedChannelException());
-                }
+        assert msg instanceof NioHttpResponse : "Message must be type: " + NioHttpResponse.class;
+        NioHttpResponse response = (NioHttpResponse) msg;
+        boolean success = false;
+        try {
+            NettyListener listener = NettyListener.fromChannelPromise(promise);
+            List<Tuple<NioHttpResponse, NettyListener>> readyResponses = aggregator.write(response, listener);
+            success = true;
+            for (Tuple<NioHttpResponse, NettyListener> responseToWrite : readyResponses) {
+                ctx.write(responseToWrite.v1().getResponse(), responseToWrite.v2());
+            }
+        } catch (IllegalStateException e) {
+            ctx.channel().close();
+        } finally {
+            if (success == false) {
+                promise.setFailure(new ClosedChannelException());
             }
-        } else {
-            ctx.write(msg, promise);
         }
     }
 
From ba82266a4a7a0c822d0287407b309f81f3c73334 Mon Sep 17 00:00:00 2001
From: Tim Brooks
Date: Thu, 17 May 2018 16:08:53 -0600
Subject: [PATCH 10/11] Fix checkstyle

---
 .../elasticsearch/http/nio/HttpReadWriteHandler.java | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
index ebddb3ac9e28b..e3481e3c254d2 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
@@ -31,6 +31,7 @@ import io.netty.handler.codec.http.HttpObjectAggregator;
 import io.netty.handler.codec.http.HttpRequestDecoder;
 import io.netty.handler.codec.http.HttpResponseEncoder;
+import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.http.HttpHandlingSettings;
@@ -168,15 +169,16 @@ private void handleRequest(Object msg) {
             /*
              * We now want to create a channel used to send the response on. However, creating this channel can fail if there are invalid
              * parameter values for any of the filter_path, human, or pretty parameters. We detect these specific failures via an
-             * IllegalArgumentException from the channel constructor and then attempt to create a new channel that bypasses parsing of these
-             * parameter values.
+             * IllegalArgumentException from the channel constructor and then attempt to create a new channel that bypasses parsing of
+             * these parameter values.
             */
            final NioHttpChannel channel;
            {
                NioHttpChannel innerChannel;
                int sequence = pipelinedRequest.getSequence();
+               BigArrays bigArrays = transport.getBigArrays();
                try {
-                   innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), httpRequest, sequence, settings, threadContext);
+                   innerChannel = new NioHttpChannel(nioChannel, bigArrays, httpRequest, sequence, settings, threadContext);
                } catch (final IllegalArgumentException e) {
                    if (badRequestCause == null) {
                        badRequestCause = e;
@@ -189,7 +191,7 @@ private void handleRequest(Object msg) {
                        Collections.emptyMap(), // we are going to dispatch the request as a bad request, drop all parameters
                        copiedRequest.uri(),
                        copiedRequest);
-                   innerChannel = new NioHttpChannel(nioChannel, transport.getBigArrays(), innerRequest, sequence, settings, threadContext);
+                   innerChannel = new NioHttpChannel(nioChannel, bigArrays, innerRequest, sequence, settings, threadContext);
                }
                channel = innerChannel;
            }

From 948841f030c49c7f554a7a4cdf81787908727687 Mon Sep 17 00:00:00 2001
From: Tim Brooks
Date: Fri, 18 May 2018 10:04:59 -0600
Subject: [PATCH 11/11] Changes for review

---
 docs/reference/migration/migrate_7_0/settings.asciidoc | 10 +++++++++-
 .../http/netty4/Netty4HttpPipeliningHandler.java       |  5 ++---
 .../http/nio/NioHttpPipeliningHandlerTests.java        |  3 +--
 3 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc
index d62d7e6065de0..7826afc05fa59 100644
--- a/docs/reference/migration/migrate_7_0/settings.asciidoc
+++ b/docs/reference/migration/migrate_7_0/settings.asciidoc
@@ -29,6 +29,14 @@
 [[remove-http-enabled]]
 ==== Http enabled setting removed
-The setting `http.enabled` previously allowed disabling binding to HTTP, only allowing
+* The setting `http.enabled` previously allowed disabling binding to HTTP, only allowing
 use of the transport client. This setting has been removed, as the transport client
 will be removed in the future, thus requiring HTTP to always be enabled.
+
+[[remove-http-pipelining-setting]]
+==== Http pipelining setting removed
+
+* The setting `http.pipelining` previously allowed disabling HTTP pipelining support.
+This setting has been removed, as disabling http pipelining support on the server
+provided little value. The setting `http.pipelining.max_events` can still be used to
+limit the number of pipelined requests in-flight.
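Aside on the `http.pipelining.max_events` limit mentioned in the migration note above: the pipelining handlers in these patches tag each inbound request with a sequence number and buffer out-of-order responses until every earlier response has been written, and they close the channel on the `IllegalStateException` raised once too many responses are held. The sketch below is illustrative only and is not the aggregator these diffs call through `aggregator.read`/`aggregator.write`; the class `SimplePipeliningAggregator` and all of its members are hypothetical names chosen for the example.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

// Illustration only: buffers responses until every lower-sequence response has been written,
// and refuses to hold more than maxEventsHeld responses at a time.
final class SimplePipeliningAggregator<R, L> {

    private final int maxEventsHeld;
    private final PriorityQueue<Entry<R, L>> outbound =
        new PriorityQueue<>(Comparator.comparingInt((Entry<R, L> e) -> e.sequence));
    private int readSequence = 0;   // sequence handed to the next inbound request
    private int writeSequence = 0;  // next sequence allowed onto the wire

    SimplePipeliningAggregator(int maxEventsHeld) {
        this.maxEventsHeld = maxEventsHeld;
    }

    // Inbound path: tags the request with its position in the pipeline.
    int read() {
        return readSequence++;
    }

    // Outbound path: returns every response that may now be written, in request order.
    // Throws IllegalStateException when too many responses are buffered, which the caller
    // translates into closing the channel.
    List<Entry<R, L>> write(int sequence, R response, L listener) {
        if (outbound.size() >= maxEventsHeld) {
            throw new IllegalStateException("too many pipelined responses held: " + outbound.size());
        }
        outbound.add(new Entry<>(sequence, response, listener));
        List<Entry<R, L>> ready = new ArrayList<>();
        while (outbound.isEmpty() == false && outbound.peek().sequence == writeSequence) {
            ready.add(outbound.poll());
            writeSequence++;
        }
        return ready;
    }

    static final class Entry<R, L> {
        final int sequence;
        final R response;
        final L listener;

        Entry(int sequence, R response, L listener) {
            this.sequence = sequence;
            this.response = response;
            this.listener = listener;
        }

        @Override
        public String toString() {
            return sequence + ":" + response;
        }
    }

    public static void main(String[] args) {
        SimplePipeliningAggregator<String, String> agg = new SimplePipeliningAggregator<>(4);
        int first = agg.read();   // sequence 0
        int second = agg.read();  // sequence 1
        // The second request finishes first, so its response has to wait.
        System.out.println(agg.write(second, "response-1", "listener-1")); // []
        // Once the first response arrives, both go out in request order.
        System.out.println(agg.write(first, "response-0", "listener-0"));  // [0:response-0, 1:response-1]
    }
}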
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java
index 01c9fc9e665cb..e930ffe7424a9 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java
@@ -64,19 +64,18 @@ public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
     }
 
     @Override
-    public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) throws Exception {
+    public void write(final ChannelHandlerContext ctx, final Object msg, final ChannelPromise promise) {
         assert msg instanceof Netty4HttpResponse : "Message must be type: " + Netty4HttpResponse.class;
         Netty4HttpResponse response = (Netty4HttpResponse) msg;
         boolean success = false;
         try {
             List<Tuple<Netty4HttpResponse, ChannelPromise>> readyResponses = aggregator.write(response, promise);
-            success = true;
             for (Tuple<Netty4HttpResponse, ChannelPromise> readyResponse : readyResponses) {
                 ctx.write(readyResponse.v1().getResponse(), readyResponse.v2());
             }
+            success = true;
         } catch (IllegalStateException e) {
             ctx.channel().close();
-            Netty4Utils.closeChannels(Collections.singletonList(ctx.channel()));
         } finally {
             if (success == false) {
                 promise.setFailure(new ClosedChannelException());

diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java
index 473cd68ef3b57..d12c608aeca2a 100644
--- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java
+++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpPipeliningHandlerTests.java
@@ -69,10 +69,9 @@ public class NioHttpPipeliningHandlerTests extends ESTestCase {
     private final Map<String, CountDownLatch> finishingRequests = new ConcurrentHashMap<>();
 
     @After
-    public void tearDown() throws Exception {
+    public void cleanup() throws Exception {
         waitingRequests.keySet().forEach(this::finishRequest);
         shutdownExecutorService();
-        super.tearDown();
     }
 
     private CountDownLatch finishRequest(String url) {
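Aside on the `EmbeddedChannel`-based tests renamed and adjusted throughout this series: Netty's `EmbeddedChannel` drives a pipeline synchronously without sockets, which is what lets these tests push requests in with `writeInbound` and assert on what the handler writes back. The snippet below is a minimal, self-contained sketch of that mechanic only, assuming Netty 4.1 on the classpath; the inline echo handler and the `EmbeddedChannelSketch` class are hypothetical stand-ins, not the pipelining handlers or helpers used by the real tests.

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpVersion;

public class EmbeddedChannelSketch {

    public static void main(String[] args) {
        // Stand-in handler so the example is self-contained; the real tests install the
        // pipelining handler under test here and assert on the order of what comes out.
        ChannelInboundHandlerAdapter echo = new ChannelInboundHandlerAdapter() {
            @Override
            public void channelRead(ChannelHandlerContext ctx, Object msg) {
                ctx.writeAndFlush(msg); // send whatever arrives straight back out
            }
        };
        EmbeddedChannel channel = new EmbeddedChannel(echo);

        // Feed a request through the pipeline without any real network I/O.
        channel.writeInbound(new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/0"));

        // Pull out what the handler wrote and inspect it.
        FullHttpRequest written = channel.readOutbound();
        System.out.println("echoed uri: " + written.uri());
        written.release(); // EmbeddedChannel does not release messages we read out

        channel.finishAndReleaseAll();
    }
}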