From 6e9c3b0e550bcb305dfaaf76f7bca65085b14209 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 28 Sep 2023 13:17:08 -0400 Subject: [PATCH] ESQL: Track blocks (#100025) This tracks blocks from topn and a few other places. We're going to try and track blocks all the places. --- .../compute/operator/TopNBenchmark.java | 2 + .../org/elasticsearch/core/Releasables.java | 2 +- .../common/util/BytesRefArray.java | 9 +- .../common/util/MockBigArrays.java | 8 +- .../indices/CrankyCircuitBreakerService.java | 15 +- .../compute/data/BooleanArrayBlock.java | 1 + .../compute/data/BooleanBlockBuilder.java | 16 +- .../compute/data/BooleanVectorBuilder.java | 13 +- .../data/BooleanVectorFixedBuilder.java | 8 + .../compute/data/BytesRefArrayBlock.java | 3 +- .../compute/data/BytesRefArrayVector.java | 2 +- .../compute/data/BytesRefBlockBuilder.java | 29 +++- .../compute/data/BytesRefVectorBuilder.java | 30 +++- .../compute/data/DoubleArrayBlock.java | 1 + .../compute/data/DoubleBlockBuilder.java | 16 +- .../compute/data/DoubleVectorBuilder.java | 13 +- .../data/DoubleVectorFixedBuilder.java | 8 + .../compute/data/IntArrayBlock.java | 1 + .../compute/data/IntBlockBuilder.java | 16 +- .../compute/data/IntVectorBuilder.java | 13 +- .../compute/data/IntVectorFixedBuilder.java | 8 + .../compute/data/LongArrayBlock.java | 1 + .../compute/data/LongBlockBuilder.java | 16 +- .../compute/data/LongVectorBuilder.java | 13 +- .../compute/data/LongVectorFixedBuilder.java | 8 + .../topn/ResultBuilderForBoolean.java | 10 +- .../topn/ResultBuilderForBytesRef.java | 10 +- .../operator/topn/ResultBuilderForDouble.java | 10 +- .../operator/topn/ResultBuilderForInt.java | 10 +- .../operator/topn/ResultBuilderForLong.java | 10 +- .../compute/data/AbstractBlockBuilder.java | 34 ++++ .../compute/data/AbstractVectorBuilder.java | 41 ++++- .../org/elasticsearch/compute/data/Block.java | 6 +- .../compute/data/BlockFactory.java | 4 +- .../compute/data/BlockUtils.java | 2 +- .../compute/data/ConstantNullBlock.java | 14 ++ .../elasticsearch/compute/data/DocBlock.java | 17 +- .../compute/data/ElementType.java | 25 ++- .../elasticsearch/compute/data/Vector.java | 6 +- .../compute/data/X-ArrayBlock.java.st | 3 +- .../compute/data/X-ArrayVector.java.st | 2 +- .../compute/data/X-BlockBuilder.java.st | 55 +++++- .../compute/data/X-VectorBuilder.java.st | 48 +++++- .../compute/data/X-VectorFixedBuilder.java.st | 8 + .../compute/operator/Driver.java | 8 +- .../exchange/ExchangeSinkOperator.java | 4 - .../compute/operator/topn/ResultBuilder.java | 26 ++- .../operator/topn/ResultBuilderForDoc.java | 16 +- .../operator/topn/ResultBuilderForNull.java | 13 +- .../compute/operator/topn/TopNOperator.java | 67 ++++++-- .../operator/topn/X-ResultBuilder.java.st | 10 +- .../elasticsearch/compute/OperatorTests.java | 2 +- .../AggregatorFunctionTestCase.java | 8 +- .../CountAggregatorFunctionTests.java | 5 +- ...istinctBooleanAggregatorFunctionTests.java | 3 +- ...ooleanGroupingAggregatorFunctionTests.java | 3 +- ...stinctBytesRefAggregatorFunctionTests.java | 3 +- ...tesRefGroupingAggregatorFunctionTests.java | 3 +- ...DistinctDoubleAggregatorFunctionTests.java | 3 +- ...DoubleGroupingAggregatorFunctionTests.java | 3 +- ...untDistinctIntAggregatorFunctionTests.java | 2 +- ...nctIntGroupingAggregatorFunctionTests.java | 3 +- ...ntDistinctLongAggregatorFunctionTests.java | 4 +- ...ctLongGroupingAggregatorFunctionTests.java | 4 +- .../CountGroupingAggregatorFunctionTests.java | 4 +- .../GroupingAggregatorFunctionTestCase.java | 28 +-- 
.../MaxDoubleAggregatorFunctionTests.java | 3 +- ...DoubleGroupingAggregatorFunctionTests.java | 3 +- .../MaxIntAggregatorFunctionTests.java | 3 +- ...MaxIntGroupingAggregatorFunctionTests.java | 3 +- .../MaxLongAggregatorFunctionTests.java | 5 +- ...axLongGroupingAggregatorFunctionTests.java | 8 +- ...eviationDoubleAggregatorFunctionTests.java | 3 +- ...DoubleGroupingAggregatorFunctionTests.java | 3 +- ...teDeviationIntAggregatorFunctionTests.java | 3 +- ...ionIntGroupingAggregatorFunctionTests.java | 3 +- ...eDeviationLongAggregatorFunctionTests.java | 5 +- ...onLongGroupingAggregatorFunctionTests.java | 5 +- .../MinDoubleAggregatorFunctionTests.java | 3 +- ...DoubleGroupingAggregatorFunctionTests.java | 3 +- .../MinIntAggregatorFunctionTests.java | 3 +- ...MinIntGroupingAggregatorFunctionTests.java | 3 +- .../MinLongAggregatorFunctionTests.java | 5 +- ...inLongGroupingAggregatorFunctionTests.java | 8 +- ...rcentileDoubleAggregatorFunctionTests.java | 3 +- ...DoubleGroupingAggregatorFunctionTests.java | 3 +- .../PercentileIntAggregatorFunctionTests.java | 3 +- ...ileIntGroupingAggregatorFunctionTests.java | 3 +- ...PercentileLongAggregatorFunctionTests.java | 5 +- ...leLongGroupingAggregatorFunctionTests.java | 4 +- .../SumDoubleAggregatorFunctionTests.java | 3 +- ...DoubleGroupingAggregatorFunctionTests.java | 3 +- .../SumIntAggregatorFunctionTests.java | 2 +- ...SumIntGroupingAggregatorFunctionTests.java | 3 +- .../SumLongAggregatorFunctionTests.java | 6 +- ...umLongGroupingAggregatorFunctionTests.java | 4 +- .../compute/data/BlockBuilderTests.java | 146 +++++++++++++--- .../compute/data/BlockFactoryTests.java | 28 ++- .../compute/data/BlockTestUtils.java | 1 + .../data/BytesRefBlockEqualityTests.java | 37 ++-- .../compute/data/DocVectorTests.java | 57 ++++--- .../data/DoubleBlockEqualityTests.java | 37 ++-- .../compute/data/IntBlockEqualityTests.java | 37 ++-- .../compute/data/LongBlockEqualityTests.java | 39 +++-- .../compute/data/TestBlockBuilder.java | 25 +++ .../compute/data/VectorBuilderTests.java | 153 +++++++++++++++++ .../compute/data/VectorFixedBuilderTests.java | 39 +++-- .../ValuesSourceReaderOperatorTests.java | 30 +++- .../operator/AggregationOperatorTests.java | 5 +- .../compute/operator/AnyOperatorTestCase.java | 2 +- .../operator/CannedSourceOperator.java | 25 ++- .../operator/ColumnExtractOperatorTests.java | 3 +- .../compute/operator/EvalOperatorTests.java | 5 +- .../compute/operator/FilterOperatorTests.java | 5 +- .../operator/ForkingOperatorTestCase.java | 12 +- .../HashAggregationOperatorTests.java | 8 +- .../compute/operator/LimitOperatorTests.java | 5 +- .../operator/MvExpandOperatorTests.java | 3 +- .../compute/operator/OperatorTestCase.java | 50 ++++-- .../operator/ProjectOperatorTests.java | 2 +- .../SequenceLongBlockSourceOperator.java | 18 +- .../operator/StringExtractOperatorTests.java | 3 +- .../operator/TupleBlockSourceOperator.java | 12 +- .../compute/operator/topn/ExtractorTests.java | 17 +- .../operator/topn/TopNOperatorTests.java | 160 +++++++++++++++--- .../rest-api-spec/test/50_index_patterns.yml | 59 +++---- .../metadata-ignoreCsvTests.csv-spec | 3 +- .../xpack/esql/action/EsqlActionIT.java | 145 ++++++++-------- .../xpack/esql/action/EsqlDisruptionIT.java | 4 + 129 files changed, 1592 insertions(+), 486 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java index 9ef4eef2a6924..84f7cec47b737 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -107,6 +108,7 @@ private static Operator operator(String data, int topCount) { ClusterSettings.createBuiltInClusterSettings() ); return new TopNOperator( + BlockFactory.getNonBreakingInstance(), breakerService.getBreaker(CircuitBreaker.REQUEST), topCount, elementTypes, diff --git a/libs/core/src/main/java/org/elasticsearch/core/Releasables.java b/libs/core/src/main/java/org/elasticsearch/core/Releasables.java index b8d1a9a542779..c2b48c4706573 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/Releasables.java +++ b/libs/core/src/main/java/org/elasticsearch/core/Releasables.java @@ -89,7 +89,7 @@ private static void close(boolean success, Releasable... releasables) { * // the resources will be released when reaching here * */ - public static Releasable wrap(final Iterable releasables) { + public static Releasable wrap(final Iterable releasables) { return new Releasable() { @Override public void close() { diff --git a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java index de061c7f314d6..91dbfc30123fe 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BytesRefArray.java @@ -160,7 +160,14 @@ public void writeTo(StreamOutput out) throws IOException { @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + startOffsets.ramBytesUsed() + bytes.ramBytesUsed(); + return BASE_RAM_BYTES_USED + bigArraysRamBytesUsed(); + } + + /** + * Memory used by the {@link BigArrays} portion of this {@link BytesRefArray}. 
+ */ + public long bigArraysRamBytesUsed() { + return startOffsets.ramBytesUsed() + bytes.ramBytesUsed(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 02e8cfd7f16fc..0a0592b5a01f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -678,6 +678,9 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu while (true) { long old = used.get(); long total = old + bytes; + if (total < 0) { + throw new AssertionError("total must be >= 0 but was [" + total + "]"); + } if (total > max.getBytes()) { throw new CircuitBreakingException(ERROR_MESSAGE, bytes, max.getBytes(), Durability.TRANSIENT); } @@ -689,7 +692,10 @@ public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws Circu @Override public void addWithoutBreaking(long bytes) { - used.addAndGet(bytes); + long total = used.addAndGet(bytes); + if (total < 0) { + throw new AssertionError("total must be >= 0 but was [" + total + "]"); + } } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java b/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java index 15ffa52569d00..bd5f974a5f800 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/CrankyCircuitBreakerService.java @@ -15,6 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerStats; import org.elasticsearch.test.ESTestCase; +import java.util.concurrent.atomic.AtomicLong; + /** * {@link CircuitBreakerService} that fails one twentieth of the time when you * add bytes. 
This is useful to make sure code responds sensibly to circuit @@ -27,31 +29,32 @@ public class CrankyCircuitBreakerService extends CircuitBreakerService { public static final String ERROR_MESSAGE = "cranky breaker"; private final CircuitBreaker breaker = new CircuitBreaker() { - @Override - public void circuitBreak(String fieldName, long bytesNeeded) { + private final AtomicLong used = new AtomicLong(); - } + @Override + public void circuitBreak(String fieldName, long bytesNeeded) {} @Override public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { if (ESTestCase.random().nextInt(20) == 0) { throw new CircuitBreakingException(ERROR_MESSAGE, Durability.PERMANENT); } + used.addAndGet(bytes); } @Override public void addWithoutBreaking(long bytes) { - + used.addAndGet(bytes); } @Override public long getUsed() { - return 0; + return used.get(); } @Override public long getLimit() { - return 0; + return Long.MAX_VALUE; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index d8a5e471aaf84..b6e36e698355b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -75,6 +75,7 @@ public BooleanBlock expand() { public static long ramBytesEstimated(boolean[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index a7d397fcfb98e..98b9fdb948bc0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB BooleanBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new boolean[initialSize]; } @@ -192,8 +194,16 @@ public BooleanBlock build() { block = new BooleanArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java index 45c74ee6e06d4..3792e39275f82 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public BooleanVector build() { + finish(); BooleanVector vector; if (valueCount == 1) { vector = new ConstantBooleanVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public BooleanVector build() { } vector = new BooleanArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index 30146d4e55c02..1428a1a221fa3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -58,4 +58,12 @@ public BooleanVector build() { } return new BooleanArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index e4ee70cd27a47..db5b5d3fcf804 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -77,6 +77,7 @@ public BytesRefBlock expand() { public static long ramBytesEstimated(BytesRefArray values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override @@ -115,7 +116,7 @@ public void close() { throw 
new IllegalStateException("can't release already released block [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index c8f5276a99db5..1692bfc59358a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -85,7 +85,7 @@ public String toString() { @Override public void close() { - blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 23c18d2a9ca6e..a60b26667eb79 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -193,19 +193,42 @@ public BytesRefBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { public BytesRefBlock build() { finish(); BytesRefBlock block; + assert estimatedBytes == 0 || firstValueIndexes != null; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); Releasables.closeExpectNoException(values); } else { - estimatedBytes += values.ramBytesUsed(); if (isDense() && singleValued()) { block = new BytesRefArrayVector(values, positionCount, blockFactory).asBlock(); } else { block = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + values = null; + built(); return block; } + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java index f37ffb2a7e28a..5ea9a2b7d0184 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java @@ -54,16 +54,40 @@ protected void growValuesArray(int newSize) { @Override public BytesRefVector build() { + finish(); BytesRefVector vector; + assert estimatedBytes == 0; if (valueCount == 1) { vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed(), false); Releasables.closeExpectNoException(values); } else { - estimatedBytes = values.ramBytesUsed(); vector = new BytesRefArrayVector(values, valueCount, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - values.bigArraysRamBytesUsed(), false); } - // update the breaker with the actual bytes used. 
- blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + values = null; + built(); return vector; } + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index b0de974a85c24..675952a8d6a85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -75,6 +75,7 @@ public DoubleBlock expand() { public static long ramBytesEstimated(double[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index a97f58f3924b1..dca8fe2d0d2e6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo DoubleBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new double[initialSize]; } @@ -192,8 +194,16 @@ public DoubleBlock build() { block = new DoubleArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
+ */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java index f92ec67aec012..12fa06a944fbc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public DoubleVector build() { + finish(); DoubleVector vector; if (valueCount == 1) { vector = new ConstantDoubleVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public DoubleVector build() { } vector = new DoubleArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index 83992ed71b720..b636d9eb19756 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -58,4 +58,12 @@ public DoubleVector build() { } return new DoubleArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 7a345941df019..4170009b89ab2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -75,6 +75,7 @@ public IntBlock expand() { public static long ramBytesEstimated(int[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 53d379d715c9b..ba96f85e73197 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui IntBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new int[initialSize]; } @@ -192,8 +194,16 @@ public IntBlock build() { block = new IntArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java index 0533d5463a4e7..155adfec02b9f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public IntVector build() { + finish(); IntVector vector; if (valueCount == 1) { vector = new ConstantIntVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public IntVector build() { } vector = new IntArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
+ */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 19303b4024869..03a15fb10a800 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -58,4 +58,12 @@ public IntVector build() { } return new IntArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 21c6b445cd37d..778ec4294180c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -75,6 +75,7 @@ public LongBlock expand() { public static long ramBytesEstimated(long[] values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index a378b382ce31e..09d858e7c9b03 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; /** @@ -20,7 +22,7 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B LongBlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new long[initialSize]; } @@ -192,8 +194,16 @@ public LongBlock build() { block = new LongArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
+ */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); + built(); return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java index 6b2e9f1de7d51..3b8bbf4219d00 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java @@ -49,6 +49,7 @@ protected void growValuesArray(int newSize) { @Override public LongVector build() { + finish(); LongVector vector; if (valueCount == 1) { vector = new ConstantLongVector(values[0], 1, blockFactory); @@ -58,8 +59,16 @@ public LongVector build() { } vector = new LongArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); + built(); return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 5414b7669f588..0960d607d9c0d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -58,4 +58,12 @@ public LongVector build() { } return new LongArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java index 50cef0417dd45..3d568adc2b5ea 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBoolean.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; class ResultBuilderForBoolean implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForBoolean implements ResultBuilder { */ private boolean key; - ResultBuilderForBoolean(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForBoolean(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); 
this.inKey = inKey; - this.builder = BooleanBlock.newBlockBuilder(initialSize); + this.builder = BooleanBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public BooleanBlock build() { public String toString() { return "ResultBuilderForBoolean[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java index 55f324c931b67..e37f82f3363a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForBytesRef.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; class ResultBuilderForBytesRef implements ResultBuilder { @@ -24,10 +25,10 @@ class ResultBuilderForBytesRef implements ResultBuilder { */ private BytesRef key; - ResultBuilderForBytesRef(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForBytesRef(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { this.encoder = encoder; this.inKey = inKey; - this.builder = BytesRefBlock.newBlockBuilder(initialSize); + this.builder = BytesRefBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -67,4 +68,9 @@ public BytesRefBlock build() { public String toString() { return "ResultBuilderForBytesRef[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java index ed4a9b45d90dc..77c976c6e0085 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForDouble.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; class ResultBuilderForDouble implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForDouble implements ResultBuilder { */ private double key; - ResultBuilderForDouble(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForDouble(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = DoubleBlock.newBlockBuilder(initialSize); + this.builder = DoubleBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public DoubleBlock build() { public String toString() { return "ResultBuilderForDouble[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java index 2bcfc81107445..389ed3bc2e3c3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForInt.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; class ResultBuilderForInt implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForInt implements ResultBuilder { */ private int key; - ResultBuilderForInt(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForInt(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = IntBlock.newBlockBuilder(initialSize); + this.builder = IntBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public IntBlock build() { public String toString() { return "ResultBuilderForInt[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java index 3ada85bf9d5c9..63ee9d35c59e5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ResultBuilderForLong.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; class ResultBuilderForLong implements ResultBuilder { @@ -20,10 +21,10 @@ class ResultBuilderForLong implements ResultBuilder { */ private long key; - ResultBuilderForLong(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderForLong(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); this.inKey = inKey; - this.builder = LongBlock.newBlockBuilder(initialSize); + this.builder = LongBlock.newBlockBuilder(initialSize, blockFactory); } @Override @@ -63,4 +64,9 @@ public LongBlock build() { public String toString() { return "ResultBuilderForLong[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index a6ad5d1299543..3d06eba398513 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -33,6 +33,8 @@ abstract class AbstractBlockBuilder implements Block.Builder { /** The number of bytes currently estimated with the breaker. 
*/ protected long estimatedBytes; + private boolean closed = false; + protected AbstractBlockBuilder(BlockFactory blockFactory) { this.blockFactory = blockFactory; } @@ -101,7 +103,14 @@ protected final void updatePosition() { } } + /** + * Called during implementations of {@link Block.Builder#build} as a first step + * to check if the block is still open and to finish the last position. + */ protected final void finish() { + if (closed) { + throw new IllegalStateException("already closed"); + } if (positionEntryIsOpen) { endPositionEntry(); } @@ -110,6 +119,16 @@ protected final void finish() { } } + /** + * Called during implementations of {@link Block.Builder#build} as a last step + * to mark the Builder as closed and make sure that further closes don't double + * free memory. + */ + protected final void built() { + closed = true; + estimatedBytes = 0; + } + protected abstract void growValuesArray(int newSize); /** The number of bytes used to represent each value element. */ @@ -125,6 +144,20 @@ protected final void ensureCapacity() { growValuesArray(newSize); } + @Override + public final void close() { + if (closed == false) { + closed = true; + adjustBreaker(-estimatedBytes); + extraClose(); + } + } + + /** + * Called when first {@link #close() closed}. + */ + protected void extraClose() {} + static int calculateNewArraySize(int currentSize) { // trivially, grows array by 50% return currentSize + (currentSize >> 1); @@ -133,6 +166,7 @@ static int calculateNewArraySize(int currentSize) { protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes, false); estimatedBytes += deltaBytes; + assert estimatedBytes >= 0; } private void setFirstValue(int position, int value) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java index 49ce276074735..274e88cd8d8b6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java @@ -7,9 +7,14 @@ package org.elasticsearch.compute.data; -abstract class AbstractVectorBuilder { +abstract class AbstractVectorBuilder implements Vector.Builder { protected int valueCount; + /** + * Has this builder been closed already? + */ + private boolean closed = false; + protected final BlockFactory blockFactory; /** The number of bytes currently estimated with the breaker. */ @@ -46,4 +51,38 @@ protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes, false); estimatedBytes += deltaBytes; } + + /** + * Called during implementations of {@link Block.Builder#build} as a first step + * to check if the block is still open and to finish the last position. + */ + protected final void finish() { + if (closed) { + throw new IllegalStateException("already closed"); + } + } + + /** + * Called during implementations of {@link Block.Builder#build} as a last step + * to mark the Builder as closed and make sure that further closes don't double + * free memory. + */ + protected final void built() { + closed = true; + estimatedBytes = 0; + } + + @Override + public final void close() { + if (closed == false) { + closed = true; + adjustBreaker(-estimatedBytes); + extraClose(); + } + } + + /** + * Called when first {@link #close() closed}. 
+ */ + protected void extraClose() {} } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 5b10a3a510de0..ca0721ebbe4f8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -133,7 +133,11 @@ static Block constantNullBlock(int positions, BlockFactory blockFactory) { return blockFactory.newConstantNullBlock(positions); } - interface Builder { + /** + * Builds {@link Block}s. Typically, you use one of its direct subinterfaces like {@link IntBlock.Builder}. + * This is {@link Releasable} and should be released after building the block or if building the block fails. + */ + interface Builder extends Releasable { /** * Appends a null value to the block. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 2afea228a4a78..63de604c49b18 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -305,7 +305,7 @@ public BytesRefBlock newBytesRefArrayBlock( MvOrdering mvOrdering ) { var b = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); - adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed(), true); return b; } @@ -315,7 +315,7 @@ public BytesRefVector.Builder newBytesRefVectorBuilder(int estimatedSize) { public BytesRefVector newBytesRefArrayVector(BytesRefArray values, int positionCount) { var b = new BytesRefArrayVector(values, positionCount, this); - adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + adjustBreaker(b.ramBytesUsed() - values.bigArraysRamBytesUsed(), true); return b; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 2ebbb771b5df1..0d09f7bb480e0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -210,7 +210,7 @@ public static Object toJavaObject(Block block, int position) { private static Object valueAtOffset(Block block, int offset) { return switch (block.elementType()) { case BOOLEAN -> ((BooleanBlock) block).getBoolean(offset); - case BYTES_REF -> ((BytesRefBlock) block).getBytesRef(offset, new BytesRef()); + case BYTES_REF -> BytesRef.deepCopyOf(((BytesRefBlock) block).getBytesRef(offset, new BytesRef())); case DOUBLE -> ((DoubleBlock) block).getDouble(offset); case INT -> ((IntBlock) block).getInt(offset); case LONG -> ((LongBlock) block).getLong(offset); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 2da9cfeba09f0..01994af1cfc96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -136,6
+136,11 @@ public void close() { static class Builder implements Block.Builder { private int positionCount; + /** + * Has this builder been closed already? + */ + private boolean closed = false; + @Override public Builder appendNull() { positionCount++; @@ -174,7 +179,16 @@ public Block.Builder mvOrdering(MvOrdering mvOrdering) { @Override public Block build() { + if (closed) { + throw new IllegalStateException("already closed"); + } + close(); return new ConstantNullBlock(positionCount); } + + @Override + public void close() { + closed = true; + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index b21a956980f6a..6bcf913ce6240 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -82,8 +82,8 @@ public void close() { /** * A builder the for {@link DocBlock}. */ - public static Builder newBlockBuilder(int estimatedSize) { - return new Builder(estimatedSize); + public static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return new Builder(estimatedSize, blockFactory); } public static class Builder implements Block.Builder { @@ -91,10 +91,10 @@ public static class Builder implements Block.Builder { private final IntVector.Builder segments; private final IntVector.Builder docs; - private Builder(int estimatedSize) { - shards = IntVector.newVectorBuilder(estimatedSize); - segments = IntVector.newVectorBuilder(estimatedSize); - docs = IntVector.newVectorBuilder(estimatedSize); + private Builder(int estimatedSize, BlockFactory blockFactory) { + shards = IntVector.newVectorBuilder(estimatedSize, blockFactory); + segments = IntVector.newVectorBuilder(estimatedSize, blockFactory); + docs = IntVector.newVectorBuilder(estimatedSize, blockFactory); } public Builder appendShard(int shard) { @@ -153,5 +153,10 @@ public DocBlock build() { // Pass null for singleSegmentNonDecreasing so we calculate it when we first need it. return new DocVector(shards.build(), segments.build(), docs.build(), null).asBlock(); } + + @Override + public void close() { + Releasables.closeExpectNoException(shards, segments, docs); + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 0c85d433018e0..4467766a9e0ef 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -9,8 +9,6 @@ import org.apache.lucene.util.BytesRef; -import java.util.function.IntFunction; - /** * The type of elements in {@link Block} and {@link Vector} */ @@ -22,7 +20,7 @@ public enum ElementType { /** * Blocks containing only null values. */ - NULL(estimatedSize -> new ConstantNullBlock.Builder()), + NULL((estimatedSize, blockFactory) -> new ConstantNullBlock.Builder()), BYTES_REF(BytesRefBlock::newBlockBuilder), @@ -34,19 +32,32 @@ public enum ElementType { /** * Intermediate blocks which don't support retrieving elements. 
*/ - UNKNOWN(estimatedSize -> { throw new UnsupportedOperationException("can't build null blocks"); }); + UNKNOWN((estimatedSize, blockFactory) -> { throw new UnsupportedOperationException("can't build null blocks"); }); + + interface BuilderSupplier { + Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory); + } - private final IntFunction builder; + private final BuilderSupplier builder; - ElementType(IntFunction builder) { + ElementType(BuilderSupplier builder) { this.builder = builder; } /** * Create a new {@link Block.Builder} for blocks of this type. + * @deprecated use {@link #newBlockBuilder(int, BlockFactory)} */ + @Deprecated public Block.Builder newBlockBuilder(int estimatedSize) { - return builder.apply(estimatedSize); + return builder.newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + /** + * Create a new {@link Block.Builder} for blocks of this type. + */ + public Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return builder.newBlockBuilder(estimatedSize, blockFactory); } public static ElementType fromJava(Class type) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 171bdbd62f4d0..c9ecf1aa9e399 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -50,7 +50,11 @@ public interface Vector extends Accountable, Releasable { /** The block factory associated with this vector. */ BlockFactory blockFactory(); - interface Builder { + /** + * Builds {@link Vector}s. Typically, you use one of its direct subinterfaces like {@link IntVector.Builder}. + * This is {@link Releasable} and should be released after building the vector or if building the vector fails. + */ + interface Builder extends Releasable { /** * Builds the block. This method can be called multiple times.
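With Block.Builder and Vector.Builder both extending Releasable, the documented contract is to release the builder after a successful build() as well as on failure, which makes try-with-resources the natural shape for callers. A hedged sketch, assuming an IntVector.Builder obtained from the two-argument factory method added in this patch (the helper and its inputs are illustrative):

    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.IntVector;

    static IntVector intsToVector(int[] values, BlockFactory blockFactory) {
        try (IntVector.Builder builder = IntVector.newVectorBuilder(values.length, blockFactory)) {
            for (int v : values) {
                builder.appendInt(v);
            }
            // If an append trips the circuit breaker, the exception propagates and
            // close() returns whatever the builder had reserved to the breaker.
            return builder.build();
        }
    }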
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 10ff868c09806..ddb0eced039be 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -93,6 +93,7 @@ $endif$ public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int[] firstValueIndexes, BitSet nullsMask) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask) + RamUsageEstimator.shallowSizeOfInstance(MvOrdering.class); + // TODO mvordering is shared } @Override @@ -137,7 +138,7 @@ $endif$ } released = true; $if(BytesRef)$ - blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); $else$ blockFactory.adjustBreaker(-ramBytesUsed(), true); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index b6a8714f882ee..3e6ccc2286675 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -110,7 +110,7 @@ $endif$ $if(BytesRef)$ @Override public void close() { - blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); + blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } $endif$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 4d43f25577cc5..0ccfc45f18664 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -14,6 +14,8 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.core.Releasables; $else$ +import org.apache.lucene.util.RamUsageEstimator; + import java.util.Arrays; $endif$ @@ -41,7 +43,7 @@ $else$ $Type$BlockBuilder(int estimatedSize, BlockFactory blockFactory) { super(blockFactory); int initialSize = Math.max(estimatedSize, 2); - adjustBreaker(initialSize); + adjustBreaker(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + initialSize * elementSize()); values = new $type$[initialSize]; } $endif$ @@ -246,27 +248,68 @@ $endif$ public $Type$Block build() { finish(); $Type$Block block; - if (hasNonNullValue && positionCount == 1 && valueCount == 1) { $if(BytesRef)$ + assert estimatedBytes == 0 || firstValueIndexes != null; + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); Releasables.closeExpectNoException(values); } else { - estimatedBytes += values.ramBytesUsed(); + if (isDense() && singleValued()) { + block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); + } else { + block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); + } + values = null; $else$ + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { block = new Constant$Type$Vector(values[0], 1, blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } -$endif$ if (isDense() && singleValued()) { block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); } else { block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, false); +$endif$ + built(); return block; } +$if(BytesRef)$ + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st index b813120b42e43..1e243c49b5d82 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st @@ -83,24 +83,62 @@ $endif$ @Override public $Type$Vector build() { + finish(); $Type$Vector vector; - if (valueCount == 1) { $if(BytesRef)$ + assert estimatedBytes == 0; + if (valueCount == 1) { vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed(), false); Releasables.closeExpectNoException(values); } else { - estimatedBytes = values.ramBytesUsed(); + vector = new $Type$ArrayVector(values, valueCount, blockFactory); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - values.bigArraysRamBytesUsed(), false); + } + values = null; $else$ + if (valueCount == 1) { vector = new Constant$Type$Vector(values[0], 1, blockFactory); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } -$endif$ vector = new $Type$ArrayVector(values, valueCount, blockFactory); } - // update the breaker with the actual bytes used. - blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, false); +$endif$ + built(); return vector; } +$if(BytesRef)$ + + @Override + public void extraClose() { + Releasables.closeExpectNoException(values); + } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index 86bc6b0a095d6..d732c85db7467 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -58,4 +58,12 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { } return new $Type$ArrayVector(values, values.length, blockFactory); } + + @Override + public void close() { + if (nextIndex >= 0) { + // If nextIndex < 0 we've already built the vector + blockFactory.adjustBreaker(-ramBytesUsed(values.length), false); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 281693a487255..1a1604406892c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -181,7 +181,13 @@ private SubscribableListener runSingleLoopIteration() { if (op.isFinished() == false && nextOp.needsInput()) { Page page = op.getOutput(); - if (page != null && page.getPositionCount() != 0) { + if (page == null) { + // No result, just move to the next iteration + } else if 
(page.getPositionCount() == 0) { + // Empty result, release any memory it holds immediately and move to the next iteration + page.releaseBlocks(); + } else { + // Non-empty result from the previous operation, move it to the next operation nextOp.addInput(page); movedPage = true; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java index d52f25e9d8306..0fb6ec6f63d96 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java @@ -36,10 +36,6 @@ public record ExchangeSinkOperatorFactory(Supplier exchangeSinks, implements SinkOperatorFactory { - public ExchangeSinkOperatorFactory(Supplier exchangeSinks) { - this(exchangeSinks, Function.identity()); - } - @Override public SinkOperator get(DriverContext driverContext) { return new ExchangeSinkOperator(exchangeSinks.get(), transformer); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java index b8a41a3ee343d..bd2027cade78f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilder.java @@ -9,12 +9,14 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.core.Releasable; /** * Builds {@link Block}s from keys and values encoded into {@link BytesRef}s. 
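The repeated comment in the build() rewrites above describes one accounting pattern: the builder reserves an estimate while values are appended, then at build time trues the reservation up to the finished block's actual ramBytesUsed() without breaking (the false argument), because that memory already exists and throwing would only leak it; the failure path is handled by close() instead. Below is a simplified, self-contained sketch of that shape written directly against the CircuitBreaker API, not the real AbstractBlockBuilder code; the sizes and label are made up:

    import org.elasticsearch.common.breaker.CircuitBreaker;

    static long[] buildTracked(CircuitBreaker breaker, int count) {
        long reserved = (long) count * Long.BYTES;
        // May throw: nothing has been allocated yet, so breaking here is safe.
        breaker.addEstimateBytesAndMaybeBreak(reserved, "example-builder");
        boolean success = false;
        try {
            long[] values = new long[count];                // the memory now really exists
            long actual = 16 + reserved;                    // pretend overhead of the finished structure
            breaker.addWithoutBreaking(actual - reserved);  // true up without throwing away live memory
            reserved = actual;
            success = true;
            return values;
        } finally {
            if (success == false) {
                breaker.addWithoutBreaking(-reserved);      // the close() path: hand the reservation back
            }
        }
    }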
*/ -interface ResultBuilder { +interface ResultBuilder extends Releasable { /** * Called for each sort key before {@link #decodeValue} to consume the sort key and * store the value of the key for {@link #decodeValue} can use it to reconstruct @@ -36,15 +38,21 @@ interface ResultBuilder { */ Block build(); - static ResultBuilder resultBuilderFor(ElementType elementType, TopNEncoder encoder, boolean inKey, int positions) { + static ResultBuilder resultBuilderFor( + BlockFactory blockFactory, + ElementType elementType, + TopNEncoder encoder, + boolean inKey, + int positions + ) { return switch (elementType) { - case BOOLEAN -> new ResultBuilderForBoolean(encoder, inKey, positions); - case BYTES_REF -> new ResultBuilderForBytesRef(encoder, inKey, positions); - case INT -> new ResultBuilderForInt(encoder, inKey, positions); - case LONG -> new ResultBuilderForLong(encoder, inKey, positions); - case DOUBLE -> new ResultBuilderForDouble(encoder, inKey, positions); - case NULL -> new ResultBuilderForNull(); - case DOC -> new ResultBuilderForDoc(positions); + case BOOLEAN -> new ResultBuilderForBoolean(blockFactory, encoder, inKey, positions); + case BYTES_REF -> new ResultBuilderForBytesRef(blockFactory, encoder, inKey, positions); + case INT -> new ResultBuilderForInt(blockFactory, encoder, inKey, positions); + case LONG -> new ResultBuilderForLong(blockFactory, encoder, inKey, positions); + case DOUBLE -> new ResultBuilderForDouble(blockFactory, encoder, inKey, positions); + case NULL -> new ResultBuilderForNull(blockFactory); + case DOC -> new ResultBuilderForDoc(blockFactory, positions); default -> { assert false : "Result builder for [" + elementType + "]"; throw new UnsupportedOperationException("Result builder for [" + elementType + "]"); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java index 166d5be83b474..7fb507ffdbead 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java @@ -13,12 +13,15 @@ import org.elasticsearch.compute.data.DocVector; class ResultBuilderForDoc implements ResultBuilder { + private final BlockFactory blockFactory; private final int[] shards; private final int[] segments; private final int[] docs; private int position; - ResultBuilderForDoc(int positions) { + ResultBuilderForDoc(BlockFactory blockFactory, int positions) { + // TODO use fixed length builders + this.blockFactory = blockFactory; this.shards = new int[positions]; this.segments = new int[positions]; this.docs = new int[positions]; @@ -40,9 +43,9 @@ public void decodeValue(BytesRef values) { @Override public Block build() { return new DocVector( - BlockFactory.getNonBreakingInstance().newIntArrayVector(shards, position), - BlockFactory.getNonBreakingInstance().newIntArrayVector(segments, position), - BlockFactory.getNonBreakingInstance().newIntArrayVector(docs, position), + blockFactory.newIntArrayVector(shards, position), + blockFactory.newIntArrayVector(segments, position), + blockFactory.newIntArrayVector(docs, position), null ).asBlock(); } @@ -51,4 +54,9 @@ public Block build() { public String toString() { return "ValueExtractorForDoc"; } + + @Override + public void close() { + // TODO memory accounting + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java index 05b9ba2a07658..a45f16fc30910 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForNull.java @@ -9,10 +9,16 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; public class ResultBuilderForNull implements ResultBuilder { + private final BlockFactory blockFactory; private int positions; + public ResultBuilderForNull(BlockFactory blockFactory) { + this.blockFactory = blockFactory; + } + @Override public void decodeKey(BytesRef keys) { throw new AssertionError("somehow got a value for a null key"); @@ -29,11 +35,16 @@ public void decodeValue(BytesRef values) { @Override public Block build() { - return Block.constantNullBlock(positions); + return Block.constantNullBlock(positions, blockFactory); } @Override public String toString() { return "ValueExtractorForNull"; } + + @Override + public void close() { + // Nothing to close + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 86b3a18992db4..9657d60376763 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -12,7 +12,9 @@ import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; @@ -205,7 +207,15 @@ public record TopNOperatorFactory( @Override public TopNOperator get(DriverContext driverContext) { - return new TopNOperator(driverContext.breaker(), topCount, elementTypes, encoders, sortOrders, maxPageSize); + return new TopNOperator( + driverContext.blockFactory(), + driverContext.breaker(), + topCount, + elementTypes, + encoders, + sortOrders, + maxPageSize + ); } @Override @@ -222,6 +232,7 @@ public String describe() { } } + private final BlockFactory blockFactory; private final CircuitBreaker breaker; private final Queue inputQueue; @@ -231,9 +242,11 @@ public String describe() { private final List encoders; private final List sortOrders; + private Row spare; private Iterator output; public TopNOperator( + BlockFactory blockFactory, CircuitBreaker breaker, int topCount, List elementTypes, @@ -241,6 +254,7 @@ public TopNOperator( List sortOrders, int maxPageSize ) { + this.blockFactory = blockFactory; this.breaker = breaker; this.maxPageSize = maxPageSize; this.elementTypes = elementTypes; @@ -301,21 +315,20 @@ public void addInput(Page page) { * and must be closed. That happens either because it's overflow from the * inputQueue or because we hit an allocation failure while building it. 
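ResultBuilder now extends Releasable and its factory method takes the BlockFactory handed down from the DriverContext, which is what toPages() in the hunks that follow relies on when it creates one builder per output channel and closes them whether or not building succeeds. A hedged sketch of that contract (the helper would have to live alongside the topn classes since ResultBuilder is package-private; the encoder choice and inKey flag are illustrative):

    import java.util.List;
    import org.elasticsearch.compute.data.Block;
    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.ElementType;
    import org.elasticsearch.core.Releasables;

    static Block[] topNColumns(BlockFactory blockFactory, List<ElementType> types, int positions) {
        ResultBuilder[] builders = new ResultBuilder[types.size()];
        try {
            for (int b = 0; b < builders.length; b++) {
                builders[b] = ResultBuilder.resultBuilderFor(blockFactory, types.get(b), TopNEncoder.DEFAULT_UNSORTABLE, false, positions);
            }
            // ... decodeKey/decodeValue would replay the encoded rows here ...
            Block[] columns = new Block[builders.length];
            for (int b = 0; b < builders.length; b++) {
                columns[b] = builders[b].build();
            }
            return columns;
        } finally {
            // Builders are Releasable now and must be closed on success and on failure alike;
            // the real toPages() additionally releases any pages already built when it fails.
            Releasables.closeExpectNoException(builders);
        }
    }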
*/ - Row row = null; try { for (int i = 0; i < page.getPositionCount(); i++) { - if (row == null) { - row = new Row(breaker); + if (spare == null) { + spare = new Row(breaker); } else { - row.keys.clear(); - row.orderByCompositeKeyAscending.clear(); - row.values.clear(); + spare.keys.clear(); + spare.orderByCompositeKeyAscending.clear(); + spare.values.clear(); } - rowFiller.row(i, row); - row = inputQueue.insertWithOverflow(row); + rowFiller.row(i, spare); + spare = inputQueue.insertWithOverflow(spare); } } finally { - Releasables.close(row); + Releasables.close(() -> page.releaseBlocks()); } } @@ -327,18 +340,24 @@ public void finish() { } private Iterator toPages() { + if (spare != null) { + // Remove the spare, we're never going to use it again. + spare.close(); + spare = null; + } if (inputQueue.size() == 0) { return Collections.emptyIterator(); } List list = new ArrayList<>(inputQueue.size()); + List result = new ArrayList<>(); + ResultBuilder[] builders = null; + boolean success = false; try { while (inputQueue.size() > 0) { list.add(inputQueue.pop()); } Collections.reverse(list); - List result = new ArrayList<>(); - ResultBuilder[] builders = null; int p = 0; int size = 0; for (int i = 0; i < list.size(); i++) { @@ -347,6 +366,7 @@ private Iterator toPages() { builders = new ResultBuilder[elementTypes.size()]; for (int b = 0; b < builders.length; b++) { builders[b] = ResultBuilder.resultBuilderFor( + blockFactory, elementTypes.get(b), encoders.get(b).toUnsortable(), channelInKey(sortOrders, b), @@ -386,14 +406,22 @@ private Iterator toPages() { p++; if (p == size) { result.add(new Page(Arrays.stream(builders).map(ResultBuilder::build).toArray(Block[]::new))); + Releasables.closeExpectNoException(builders); builders = null; } - } assert builders == null; + success = true; return result.iterator(); } finally { - Releasables.closeExpectNoException(() -> Releasables.close(list)); + if (success == false) { + List close = new ArrayList<>(list); + for (Page p : result) { + close.add(p::releaseBlocks); + } + Collections.addAll(close, builders); + Releasables.closeExpectNoException(Releasables.wrap(close)); + } } } @@ -422,10 +450,15 @@ public Page getOutput() { @Override public void close() { /* - * If everything went well we'll have drained inputQueue to this'll - * be a noop. But if inputQueue + * If we close before calling finish then spare and inputQueue will be live rows + * that need closing. If we close after calling finish then the output iterator + * will contain pages of results that have yet to be returned. */ - Releasables.closeExpectNoException(() -> Releasables.close(inputQueue)); + Releasables.closeExpectNoException( + spare, + inputQueue == null ? null : Releasables.wrap(inputQueue), + output == null ? 
null : Releasables.wrap(() -> Iterators.map(output, p -> p::releaseBlocks)) + ); } private static long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(TopNOperator.class) + RamUsageEstimator diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st index 5f9a35bd0ebd3..ebe62398c8504 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/X-ResultBuilder.java.st @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator.topn; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.$Type$Block; class ResultBuilderFor$Type$ implements ResultBuilder { @@ -26,14 +27,14 @@ $endif$ */ private $type$ key; - ResultBuilderFor$Type$(TopNEncoder encoder, boolean inKey, int initialSize) { + ResultBuilderFor$Type$(BlockFactory blockFactory, TopNEncoder encoder, boolean inKey, int initialSize) { $if(BytesRef)$ this.encoder = encoder; $else$ assert encoder == TopNEncoder.DEFAULT_UNSORTABLE : encoder.toString(); $endif$ this.inKey = inKey; - this.builder = $Type$Block.newBlockBuilder(initialSize); + this.builder = $Type$Block.newBlockBuilder(initialSize, blockFactory); } @Override @@ -81,4 +82,9 @@ $endif$ public String toString() { return "ResultBuilderFor$Type$[inKey=" + inKey + "]"; } + + @Override + public void close() { + builder.close(); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 04a966b399870..bdf696f460060 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -287,7 +287,7 @@ public void testLimitOperator() { try ( var driver = new Driver( driverContext, - new SequenceLongBlockSourceOperator(values, 100), + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), values, 100), List.of((new LimitOperator.Factory(limit)).get(driverContext)), new PageConsumerOperator(page -> { LongBlock block = page.getBlock(0); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index a4b6c8b965962..22325039af124 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -91,8 +91,8 @@ protected final ByteSizeValue smallEnoughToCircuitBreak() { public final void testIgnoresNulls() { int end = between(1_000, 100_000); List results = new ArrayList<>(); - List input = CannedSourceOperator.collectPages(simpleInput(end)); DriverContext driverContext = driverContext(); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), end)); try ( Driver d = new Driver( @@ -111,7 +111,9 @@ public final void testIgnoresNulls() { public final void testMultivalued() { int end = between(1_000, 100_000); DriverContext driverContext = driverContext(); - 
List input = CannedSourceOperator.collectPages(new PositionMergingSourceOperator(simpleInput(end))); + List input = CannedSourceOperator.collectPages( + new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end)) + ); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } @@ -119,7 +121,7 @@ public final void testMultivaluedWithNulls() { int end = between(1_000, 100_000); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages( - new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(end))) + new NullInsertingSourceOperator(new PositionMergingSourceOperator(simpleInput(driverContext.blockFactory(), end))) ); assertSimpleOutput(input, drive(simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext), input.iterator())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java index 11241020a6709..623de7fdd1fff 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,8 +22,8 @@ public class CountAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLong())); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLong())); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java index 74cd88feed3f4..febbb4d4a0615 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceBooleanBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class CountDistinctBooleanAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceBooleanBlockSourceOperator(LongStream.range(0, 
size).mapToObj(l -> randomBoolean()).toList()); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java index eab1b9cb2d8de..7360b101bf79d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongBooleanTupleBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongBooleanTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomBoolean())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java index 69ccc0a04c0f9..c495a6b9f196b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.BytesRefBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -23,7 +24,7 @@ public class CountDistinctBytesRefAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, Math.min(Integer.MAX_VALUE, Integer.MAX_VALUE / size)); return new BytesRefBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> new BytesRef(String.valueOf(between(-max, max)))).toList() diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java index 919d06af430fd..eadbba9f91880 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import 
org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongBytesRefTupleBlockSourceOperator; @@ -35,7 +36,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongBytesRefTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), new BytesRef(String.valueOf(between(1, 10000))))) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java index c0678441cdc74..ccfe7b426ebca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -23,7 +24,7 @@ public class CountDistinctDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java index 5a928f12d33b7..0c4d89da09b99 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -34,7 +35,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomDoubleBetween(0, 100, true))) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 3699a87431937..b67e4cdee7e97 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -29,7 +29,7 @@ public class CountDistinctIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, Math.min(Integer.MAX_VALUE, Integer.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(-max, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java index f2a46e9f4c3af..678024c19d391 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -34,7 +35,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), between(0, 10000)))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 556f9d0ccc462..704b5c649f744 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -30,9 +30,9 @@ public class CountDistinctLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java index 
a5959471b8e15..4282adaba595e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,9 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomGroupId(size), randomLongBetween(0, 100_000))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java index 54a35fcc19cb2..945c68711bb4e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -33,9 +34,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { if (randomBoolean()) { return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index eab6eb30261bd..4ae58fd8c6333 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -147,7 +147,9 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { public final void testNullGroupsAndValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(simpleInput(end))); + List input = CannedSourceOperator.collectPages( + new NullInsertingSourceOperator(simpleInput(driverContext.blockFactory(), end)) + ); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -155,7 +157,7 @@ public final void testNullGroupsAndValues() { public final void testNullGroups() { DriverContext driverContext = driverContext(); 
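The test updates in this stretch are mechanical but all follow one pattern: simpleInput now receives the BlockFactory of the same DriverContext that will run the operators, so the generated input pages are tracked by the same breaker as everything else in the test. A sketch of what a typical override looks like after the change (the concrete source operator and value stream are just one example, mirroring the CountAggregatorFunctionTests hunk above):

    import java.util.stream.LongStream;
    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator;
    import org.elasticsearch.compute.operator.SourceOperator;

    @Override
    protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
        // blockFactory arrives from driverContext().blockFactory() at the call sites above.
        return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLong()));
    }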
int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullGroups(simpleInput(end))); + List input = CannedSourceOperator.collectPages(nullGroups(simpleInput(driverContext.blockFactory(), end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -184,7 +186,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl public final void testNullValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); + List input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -192,7 +194,7 @@ public final void testNullValues() { public final void testNullValuesInitialIntermediateFinal() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullValues(simpleInput(end))); + List input = CannedSourceOperator.collectPages(nullValues(simpleInput(driverContext.blockFactory(), end))); List results = drive( List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), @@ -220,7 +222,7 @@ protected void appendNull(ElementType elementType, Block.Builder builder, int bl public final void testMultivalued() { DriverContext driverContext = driverContext(); int end = between(1_000, 100_000); - List input = CannedSourceOperator.collectPages(mergeValues(simpleInput(end))); + List input = CannedSourceOperator.collectPages(mergeValues(simpleInput(driverContext.blockFactory(), end))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -228,7 +230,9 @@ public final void testMultivalued() { public final void testMulitvaluedNullGroupsAndValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(new NullInsertingSourceOperator(mergeValues(simpleInput(end)))); + List input = CannedSourceOperator.collectPages( + new NullInsertingSourceOperator(mergeValues(simpleInput(driverContext.blockFactory(), end))) + ); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -236,7 +240,7 @@ public final void testMulitvaluedNullGroupsAndValues() { public final void testMulitvaluedNullGroup() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(end)))); + List input = CannedSourceOperator.collectPages(nullGroups(mergeValues(simpleInput(driverContext.blockFactory(), end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -244,7 +248,7 @@ public final void testMulitvaluedNullGroup() { public final void testMulitvaluedNullValues() { DriverContext driverContext = driverContext(); int end = between(50, 60); - List input = CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(end)))); + List input = 
CannedSourceOperator.collectPages(nullValues(mergeValues(simpleInput(driverContext.blockFactory(), end)))); List results = drive(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext), input.iterator()); assertSimpleOutput(input, results); } @@ -295,12 +299,13 @@ private void assertNullOnly(List operators) { public final void testNullSome() { DriverContext driverContext = driverContext(); - assertNullSome(List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); + assertNullSome(driverContext, List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext))); } public final void testNullSomeInitialFinal() { DriverContext driverContext = driverContext(); assertNullSome( + driverContext, List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.FINAL).get(driverContext) @@ -311,6 +316,7 @@ public final void testNullSomeInitialFinal() { public final void testNullSomeInitialIntermediateFinal() { DriverContext driverContext = driverContext(); assertNullSome( + driverContext, List.of( simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INITIAL).get(driverContext), simpleWithMode(nonBreakingBigArrays().withCircuitBreaking(), AggregatorMode.INTERMEDIATE).get(driverContext), @@ -322,8 +328,8 @@ public final void testNullSomeInitialIntermediateFinal() { /** * Run the agg on some data where one group is always null. */ - private void assertNullSome(List operators) { - List inputData = CannedSourceOperator.collectPages(simpleInput(1000)); + private void assertNullSome(DriverContext driverContext, List operators) { + List inputData = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 1000)); SeenGroups seenGroups = seenGroups(inputData); long nullGroup = randomFrom(seenGroups.nonNull); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java index b67220b4909b7..cfda483d029f6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class MaxDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java index 3750aec95f3a7..9a2c8bc17685d 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -24,7 +25,7 @@ public class MaxDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java index 72cfa06222b50..e76021b883120 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,7 +21,7 @@ public class MaxIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(IntStream.range(0, size).map(l -> randomInt())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java index 9ffee498eeba2..313e10be39855 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt()))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java index 4e84f2e672b97..a51aa98f7a5a8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,9 +21,9 @@ public class MaxLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java index e284f2a6103d1..a1f44e128c2e1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,11 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong()))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java index 74bda421a545e..1c14a8e7855ce 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; 
import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,7 +23,7 @@ public class MedianAbsoluteDeviationDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List values = Arrays.asList(1.2, 1.25, 2.0, 2.0, 4.3, 6.0, 9.0); Randomness.shuffle(values); return new SequenceDoubleBlockSourceOperator(values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java index 6751486453f30..06ddb2a734f8c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { double[][] samples = new double[][] { { 1.2, 1.25, 2.0, 2.0, 4.3, 6.0, 9.0 }, { 0.1, 1.5, 2.0, 3.0, 4.0, 7.5, 100.0 }, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java index 20506cc5c8f93..40e422b6efc26 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,7 +23,7 @@ public class MedianAbsoluteDeviationIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List values = Arrays.asList(12, 125, 20, 20, 43, 60, 90); Randomness.shuffle(values); return new SequenceIntBlockSourceOperator(values); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java index 20f62c67a16cc..2f00764f6fe51 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { int[][] samples = new int[][] { { 12, 125, 20, 20, 43, 60, 90 }, { 1, 15, 20, 30, 40, 75, 1000 }, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java index d80415f83daa2..465bb5800bbb6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -22,10 +23,10 @@ public class MedianAbsoluteDeviationLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List values = Arrays.asList(12L, 125L, 20L, 20L, 43L, 60L, 90L); Randomness.shuffle(values); - return new SequenceLongBlockSourceOperator(values); + return new SequenceLongBlockSourceOperator(blockFactory, values); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java index c3cebad8e0e0b..2c6bfc1204591 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import 
org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -27,7 +28,7 @@ public class MedianAbsoluteDeviationLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { long[][] samples = new long[][] { { 12, 125, 20, 20, 43, 60, 90 }, { 1, 15, 20, 30, 40, 75, 1000 }, @@ -42,7 +43,7 @@ protected SourceOperator simpleInput(int end) { values.add(Tuple.tuple((long) i, v)); } } - return new TupleBlockSourceOperator(values); + return new TupleBlockSourceOperator(blockFactory, values); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java index 622302d549fd0..7e0b7241cf258 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -21,7 +22,7 @@ public class MinDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java index 12c63e354547a..7c4141f4a7ad1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -23,7 +24,7 @@ public class MinDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java index 2dc0e893875ab..dc1ab1398fb90 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,7 +21,7 @@ public class MinIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceIntBlockSourceOperator(IntStream.range(0, size).map(l -> randomInt())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java index 4ffbe9b1396d3..55cfc2d124e5f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -33,7 +34,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new LongIntBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt()))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java index 25a420237893e..91feb141ac74b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -20,9 +21,9 @@ public class MinLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> 
randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java index 311e7e41ed9ac..02dda3fe3c236 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -33,8 +34,11 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong()))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java index 96e61d4782022..61f26cd0209b3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -41,7 +42,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java index c0d6595e088eb..9495e78ec47ca 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import 
org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -42,7 +43,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java index c34a01e608d1a..37d153f7bcae6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -40,7 +41,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(0, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java index a018fba96e897..948e156e52c85 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -42,7 +43,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new LongIntBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), between(-1, max))) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java index cf0b18840d91e..eb32dac18ea80 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -40,9 +41,9 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, 1_000_000); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(0, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(0, max))); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java index 609526532b72e..6360be8595ff8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -42,9 +43,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size / 5); return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLongBetween(-0, max))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 767f9a2d5c25b..d3dc262419008 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; @@ -28,7 +29,7 @@ public class SumDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { return new 
SequenceDoubleBlockSourceOperator(LongStream.range(0, size).mapToDouble(l -> ESTestCase.randomDouble())); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java index 03a7269b84690..8b86d99653282 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; @@ -23,7 +24,7 @@ public class SumDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new LongDoubleTupleBlockSourceOperator( LongStream.range(0, end).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index e6fccf2d46f61..736386fae3dec 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -27,7 +27,7 @@ public class SumIntAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new SequenceIntBlockSourceOperator(LongStream.range(0, size).mapToInt(l -> between(-max, max))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java index 71666024c819d..0b8678a0e3f05 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; @@ -32,7 +33,7 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { int max = between(1, (int) Math.min(Integer.MAX_VALUE, Long.MAX_VALUE / size)); return new 
LongIntBlockSourceOperator( LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), between(-max, max))) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index ae5aaa5b21965..e9523c5583cd4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -27,9 +27,9 @@ public class SumLongAggregatorFunctionTests extends AggregatorFunctionTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override @@ -53,7 +53,7 @@ public void testOverflowFails() { try ( Driver d = new Driver( driverContext, - new SequenceLongBlockSourceOperator(LongStream.of(Long.MAX_VALUE - 1, 2)), + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.of(Long.MAX_VALUE - 1, 2)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java index e0dc918b515d6..827dc06a4f542 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunctionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; @@ -32,9 +33,10 @@ protected String expectedDescriptionOfAggregator() { } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size / 5); return new TupleBlockSourceOperator( + blockFactory, LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLongBetween(-max, max))) ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java index de552d242afa2..2179e68c47832 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java @@ -7,47 +7,48 @@ package org.elasticsearch.compute.data; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import 
org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.List; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class BlockBuilderTests extends ESTestCase { - - public void testAllNullsInt() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(IntBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(IntBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); + @ParametersFactory + public static List params() { + List params = new ArrayList<>(); + for (ElementType elementType : ElementType.values()) { + if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + continue; + } + params.add(new Object[] { elementType }); } + return params; } - public void testAllNullsLong() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(LongBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(LongBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); - } - } + private final ElementType elementType; - public void testAllNullsDouble() { - for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(DoubleBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(DoubleBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); - } + public BlockBuilderTests(ElementType elementType) { + this.elementType = elementType; } - public void testAllNullsBytesRef() { + public void testAllNulls() { for (int numEntries : List.of(1, randomIntBetween(1, 100))) { - testAllNullsImpl(BytesRefBlock.newBlockBuilder(0), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(100), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(1000), numEntries); - testAllNullsImpl(BytesRefBlock.newBlockBuilder(randomIntBetween(0, 100)), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(0), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(100), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(1000), numEntries); + testAllNullsImpl(elementType.newBlockBuilder(randomIntBetween(0, 100)), numEntries); } } @@ -65,4 +66,95 @@ private void testAllNullsImpl(Block.Builder builder, int numEntries) { static int randomPosition(int positionCount) { return positionCount == 1 ? 
0 : randomIntBetween(0, positionCount - 1); } + + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + elementType.newBlockBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBuildSmallSingleValued() { + testBuild(between(1, 100), false, 1); + } + + public void testBuildHugeSingleValued() { + testBuild(between(1_000, 50_000), false, 1); + } + + public void testBuildSmallSingleValuedNullable() { + testBuild(between(1, 100), true, 1); + } + + public void testBuildHugeSingleValuedNullable() { + testBuild(between(1_000, 50_000), true, 1); + } + + public void testBuildSmallMultiValued() { + testBuild(between(1, 100), false, 3); + } + + public void testBuildHugeMultiValued() { + testBuild(between(1_000, 50_000), false, 3); + } + + public void testBuildSmallMultiValuedNullable() { + testBuild(between(1, 100), true, 3); + } + + public void testBuildHugeMultiValuedNullable() { + testBuild(between(1_000, 50_000), true, 3); + } + + public void testBuildSingle() { + testBuild(1, false, 1); + } + + private void testBuild(int size, boolean nullable, int maxValueCount) { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Block.Builder builder = elementType.newBlockBuilder(randomBoolean() ? size : 1, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, nullable, 1, maxValueCount, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testDoubleBuild() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testCranky() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + try { + try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + builder.copyFrom(random.block(), 0, random.block().getPositionCount()); + try (Block built = builder.build()) { + assertThat(built, equalTo(random.block())); + } + } + // If we made it this far cranky didn't fail us! 
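// (Illustrative comment, not a line from this patch: CrankyCircuitBreakerService is a test-only
// breaker service that fails allocations at random, so this test accepts either outcome — a clean
// run, or a CircuitBreakingException carrying CrankyCircuitBreakerService.ERROR_MESSAGE — and in
// both cases still requires the factory's breaker to read 0 afterwards, i.e. the builder must
// release whatever it had allocated before the simulated failure.)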
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index a524221dd50d7..24d4e27e92ad1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -49,11 +49,29 @@ public static BlockFactory blockFactory(ByteSizeValue size) { @ParametersFactory public static List params() { - List> l = List.of(() -> { - CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - return BlockFactory.getInstance(breaker, bigArrays); - }, BlockFactory::getGlobalInstance); + List> l = List.of(new Supplier<>() { + @Override + public BlockFactory get() { + CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + return BlockFactory.getInstance(breaker, bigArrays); + } + + @Override + public String toString() { + return "1gb"; + } + }, new Supplier<>() { + @Override + public BlockFactory get() { + return BlockFactory.getGlobalInstance(); + } + + @Override + public String toString() { + return "global"; + } + }); return l.stream().map(s -> new Object[] { s }).toList(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java index a5637128705ca..dd61c8f6478d3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java @@ -77,6 +77,7 @@ public static void readInto(List> values, Page page) { for (int i = 0; i < page.getBlockCount(); i++) { readInto(values.get(i), page.getBlock(i)); } + page.releaseBlocks(); } public static void readInto(List values, Block block) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java index 0eb9beec2e7f9..ee654497c1ec3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java @@ -18,7 +18,6 @@ import java.util.Arrays; import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class BytesRefBlockEqualityTests extends ESTestCase { @@ -332,10 +331,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = BytesRefBlock.newBlockBuilder(grow ? 
0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - BytesRefBlock block1 = builder.build(); - BytesRefBlock block2 = builder.build(); + BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + BytesRefBlock block1 = builder1.build(); + BytesRefBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -365,15 +368,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder3 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendBytesRef(new BytesRef(Integer.toHexString(randomInt())))); + for (int i = 0; i < values; i++) { + BytesRef value = new BytesRef(Integer.toHexString(randomInt())); + builder1.appendBytesRef(value); + builder2.appendBytesRef(value); + builder3.appendBytesRef(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - BytesRefBlock block1 = builder.build(); - BytesRefBlock block2 = builder.build(); - BytesRefBlock block3 = builder.build(); + BytesRefBlock block1 = builder1.build(); + BytesRefBlock block2 = builder2.build(); + BytesRefBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index d8258ab28a078..0f76e024c7436 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; @@ -98,33 +99,37 @@ public void testRandomShardSegmentDocMap() { } private void assertShardSegmentDocMap(int[][] data, int[][] expected) { - DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length); - for (int r = 0; r < data.length; r++) { - builder.appendShard(data[r][0]); - builder.appendSegment(data[r][1]); - builder.appendDoc(data[r][2]); + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length, blockFactory)) { + for (int r = 0; r < data.length; r++) { + builder.appendShard(data[r][0]); + builder.appendSegment(data[r][1]); + builder.appendDoc(data[r][2]); + } + try (DocVector docVector = 
builder.build().asVector()) { + int[] forwards = docVector.shardSegmentDocMapForwards(); + + int[][] result = new int[docVector.getPositionCount()][]; + for (int p = 0; p < result.length; p++) { + result[p] = new int[] { + docVector.shards().getInt(forwards[p]), + docVector.segments().getInt(forwards[p]), + docVector.docs().getInt(forwards[p]) }; + } + assertThat(result, equalTo(expected)); + + int[] backwards = docVector.shardSegmentDocMapBackwards(); + for (int p = 0; p < result.length; p++) { + result[p] = new int[] { + docVector.shards().getInt(backwards[forwards[p]]), + docVector.segments().getInt(backwards[forwards[p]]), + docVector.docs().getInt(backwards[forwards[p]]) }; + } + + assertThat(result, equalTo(data)); + } } - DocVector docVector = builder.build().asVector(); - int[] forwards = docVector.shardSegmentDocMapForwards(); - - int[][] result = new int[docVector.getPositionCount()][]; - for (int p = 0; p < result.length; p++) { - result[p] = new int[] { - docVector.shards().getInt(forwards[p]), - docVector.segments().getInt(forwards[p]), - docVector.docs().getInt(forwards[p]) }; - } - assertThat(result, equalTo(expected)); - - int[] backwards = docVector.shardSegmentDocMapBackwards(); - for (int p = 0; p < result.length; p++) { - result[p] = new int[] { - docVector.shards().getInt(backwards[forwards[p]]), - docVector.segments().getInt(backwards[forwards[p]]), - docVector.docs().getInt(backwards[forwards[p]]) }; - } - - assertThat(result, equalTo(data)); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } public void testCannotDoubleRelease() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index 2abbcc0b989f1..7dda97f52834e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class DoubleBlockEqualityTests extends ESTestCase { @@ -224,10 +223,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = DoubleBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - DoubleBlock block1 = builder.build(); - DoubleBlock block2 = builder.build(); + DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + DoubleBlock block1 = builder1.build(); + DoubleBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -248,15 +251,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 
0 : positions); + DoubleBlock.Builder builder3 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendDouble(randomDouble())); + for (int i = 0; i < values; i++) { + double value = randomDouble(); + builder1.appendDouble(value); + builder2.appendDouble(value); + builder3.appendDouble(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - DoubleBlock block1 = builder.build(); - DoubleBlock block2 = builder.build(); - DoubleBlock block3 = builder.build(); + DoubleBlock block1 = builder1.build(); + DoubleBlock block2 = builder2.build(); + DoubleBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java index c4e19106d4368..40c84324f13d2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class IntBlockEqualityTests extends ESTestCase { @@ -185,10 +184,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - IntBlock block1 = builder.build(); - IntBlock block2 = builder.build(); + IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + IntBlock block1 = builder1.build(); + IntBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -210,15 +213,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder3 = IntBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendInt(randomInt())); + for (int i = 0; i < values; i++) { + int value = randomInt(); + builder1.appendInt(value); + builder2.appendInt(value); + builder3.appendInt(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - IntBlock block1 = builder.build(); - IntBlock block2 = builder.build(); - IntBlock block3 = builder.build(); + IntBlock block1 = builder1.build(); + IntBlock block2 = builder2.build(); + IntBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index 3d08b2a96d635..a24b4a4dd6fa6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -11,7 +11,6 @@ import java.util.BitSet; import java.util.List; -import java.util.stream.IntStream; public class LongBlockEqualityTests extends ESTestCase { @@ -191,10 +190,14 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = LongBlock.newBlockBuilder(grow ? 0 : positions); - IntStream.range(0, positions).forEach(i -> builder.appendNull()); - LongBlock block1 = builder.build(); - LongBlock block2 = builder.build(); + LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + for (int p = 0; p < positions; p++) { + builder1.appendNull(); + builder2.appendNull(); + } + LongBlock block1 = builder1.build(); + LongBlock block2 = builder2.build(); assertEquals(positions, block1.getPositionCount()); assertTrue(block1.mayHaveNulls()); assertTrue(block1.isNull(0)); @@ -216,15 +219,27 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - var builder = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder3 = LongBlock.newBlockBuilder(grow ? 
0 : positions); for (int pos = 0; pos < positions; pos++) { - builder.beginPositionEntry(); - int values = randomIntBetween(1, 16); - IntStream.range(0, values).forEach(i -> builder.appendLong(randomLong())); + builder1.beginPositionEntry(); + builder2.beginPositionEntry(); + builder3.beginPositionEntry(); + int valueCount = randomIntBetween(1, 16); + for (int i = 0; i < valueCount; i++) { + long value = randomLong(); + builder1.appendLong(value); + builder2.appendLong(value); + builder3.appendLong(value); + } + builder1.endPositionEntry(); + builder2.endPositionEntry(); + builder3.endPositionEntry(); } - LongBlock block1 = builder.build(); - LongBlock block2 = builder.build(); - LongBlock block3 = builder.build(); + LongBlock block1 = builder1.build(); + LongBlock block2 = builder2.build(); + LongBlock block3 = builder3.build(); assertEquals(positions, block1.getPositionCount()); assertAllEquals(List.of(block1, block2, block3)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index 4684da93a661a..d9377a490368d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -139,6 +139,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public IntBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestLongBlockBuilder extends TestBlockBuilder { @@ -195,6 +200,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public LongBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestDoubleBlockBuilder extends TestBlockBuilder { @@ -251,6 +261,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public DoubleBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestBytesRefBlockBuilder extends TestBlockBuilder { @@ -307,6 +322,11 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public BytesRefBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } private static class TestBooleanBlockBuilder extends TestBlockBuilder { @@ -366,5 +386,10 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { public BooleanBlock build() { return builder.build(); } + + @Override + public void close() { + builder.close(); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java new file mode 100644 index 0000000000000..656d79070f217 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class VectorBuilderTests extends ESTestCase { + @ParametersFactory + public static List params() { + List params = new ArrayList<>(); + for (ElementType elementType : ElementType.values()) { + if (elementType == ElementType.UNKNOWN || elementType == ElementType.NULL || elementType == ElementType.DOC) { + continue; + } + params.add(new Object[] { elementType }); + } + return params; + } + + private final ElementType elementType; + + public VectorBuilderTests(ElementType elementType) { + this.elementType = elementType; + } + + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + vectorBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBuildSmall() { + testBuild(between(1, 100)); + } + + public void testBuildHuge() { + testBuild(between(1_000, 50_000)); + } + + public void testBuildSingle() { + testBuild(1); + } + + private void testBuild(int size) { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Vector.Builder builder = vectorBuilder(randomBoolean() ? 
size : 1, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testDoubleBuild() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testCranky() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + try { + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + } + // If we made it this far cranky didn't fail us! 
+ } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return switch (elementType) { + case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case BOOLEAN -> BooleanVector.newVectorBuilder(estimatedSize, blockFactory); + case BYTES_REF -> BytesRefVector.newVectorBuilder(estimatedSize, blockFactory); + case DOUBLE -> DoubleVector.newVectorBuilder(estimatedSize, blockFactory); + case INT -> IntVector.newVectorBuilder(estimatedSize, blockFactory); + case LONG -> LongVector.newVectorBuilder(estimatedSize, blockFactory); + }; + } + + private void fill(Vector.Builder builder, Vector from) { + switch (elementType) { + case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); + case BOOLEAN -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((BooleanVector.Builder) builder).appendBoolean(((BooleanVector) from).getBoolean(p)); + } + } + case BYTES_REF -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((BytesRefVector.Builder) builder).appendBytesRef(((BytesRefVector) from).getBytesRef(p, new BytesRef())); + } + } + case DOUBLE -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((DoubleVector.Builder) builder).appendDouble(((DoubleVector) from).getDouble(p)); + } + } + case INT -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((IntVector.Builder) builder).appendInt(((IntVector) from).getInt(p)); + } + } + case LONG -> { + for (int p = 0; p < from.getPositionCount(); p++) { + ((LongVector.Builder) builder).appendLong(((LongVector) from).getLong(p)); + } + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java index 9fa9f7e32c654..df67ee2e7822a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java @@ -45,6 +45,12 @@ public VectorFixedBuilderTests(ElementType elementType) { this.elementType = elementType; } + public void testCloseWithoutBuilding() { + BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); + vectorBuilder(10, blockFactory).close(); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + public void testBuildSmall() { testBuild(between(1, 100)); } @@ -59,25 +65,32 @@ public void testBuildSingle() { private void testBuild(int size) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - Vector.Builder builder = vectorBuilder(size, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); - assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + try (Vector.Builder builder = vectorBuilder(size, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, 
equalTo(random.block().asVector())); + assertThat(blockFactory.breaker().getUsed(), equalTo(built.ramBytesUsed())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); } public void testDoubleBuild() { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - Vector.Builder builder = vectorBuilder(10, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); - fill(builder, random.block().asVector()); - try (Vector built = builder.build()) { - assertThat(built, equalTo(random.block().asVector())); + try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { + BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + fill(builder, random.block().asVector()); + try (Vector built = builder.build()) { + assertThat(built, equalTo(random.block().asVector())); + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + Exception e = expectThrows(IllegalStateException.class, builder::build); + assertThat(e.getMessage(), equalTo("already closed")); } - Exception e = expectThrows(IllegalStateException.class, builder::build); - assertThat(e.getMessage(), equalTo("already closed")); + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } public void testCranky() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 64edcaa43d89b..4776b40e12115 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; @@ -115,7 +116,7 @@ static Operator.OperatorFactory factory(IndexReader reader, ValuesSourceType vsT } @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { // The test wants more than one segment. We shoot for about 10. 
int commitEvery = Math.max(1, size / 10); try ( @@ -198,21 +199,35 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testLoadAll() { - loadSimpleAndAssert(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024)))); + DriverContext driverContext = driverContext(); + loadSimpleAndAssert( + driverContext, + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ); } public void testLoadAllInOnePage() { + DriverContext driverContext = driverContext(); loadSimpleAndAssert( - List.of(CannedSourceOperator.mergePages(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024))))) + driverContext, + List.of( + CannedSourceOperator.mergePages( + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ) + ) ); } public void testEmpty() { - loadSimpleAndAssert(CannedSourceOperator.collectPages(simpleInput(0))); + DriverContext driverContext = driverContext(); + loadSimpleAndAssert(driverContext, CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), 0))); } public void testLoadAllInOnePageShuffled() { - Page source = CannedSourceOperator.mergePages(CannedSourceOperator.collectPages(simpleInput(between(1_000, 100 * 1024)))); + DriverContext driverContext = driverContext(); + Page source = CannedSourceOperator.mergePages( + CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100 * 1024))) + ); List shuffleList = new ArrayList<>(); IntStream.range(0, source.getPositionCount()).forEach(i -> shuffleList.add(i)); Randomness.shuffle(shuffleList); @@ -222,11 +237,10 @@ public void testLoadAllInOnePageShuffled() { shuffledBlocks[b] = source.getBlock(b).filter(shuffleArray); } source = new Page(shuffledBlocks); - loadSimpleAndAssert(List.of(source)); + loadSimpleAndAssert(driverContext, List.of(source)); } - private void loadSimpleAndAssert(List input) { - DriverContext driverContext = driverContext(); + private void loadSimpleAndAssert(DriverContext driverContext, List input) { List results = new ArrayList<>(); List operators = List.of( factory( diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 9eaa1e333f66e..784d5134e9608 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -28,9 +29,9 @@ public class AggregationOperatorTests extends ForkingOperatorTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLongBetween(-max, max))); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index 8f995d9a31bc3..14d83ce252e5f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -95,7 +95,7 @@ protected final BigArrays nonBreakingBigArrays() { /** * A {@link DriverContext} with a nonBreakingBigArrays. */ - protected DriverContext driverContext() { + protected DriverContext driverContext() { // TODO make this final and return a breaking block factory return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index d5b07a713b8b4..57ea313b88dab 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -53,6 +53,25 @@ public static Page mergePages(List pages) { return new Page(blocks); } + /** + * Make a deep copy of some pages. Useful so that when the originals are + * released the copies are still live. + */ + public static List deepCopyOf(List pages) { + List out = new ArrayList<>(pages.size()); + for (Page p : pages) { + Block[] blocks = new Block[p.getBlockCount()]; + for (int b = 0; b < blocks.length; b++) { + Block orig = p.getBlock(b); + Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount()); + builder.copyFrom(orig, 0, p.getPositionCount()); + blocks[b] = builder.build(); + } + out.add(new Page(blocks)); + } + return out; + } + private final Iterator page; public CannedSourceOperator(Iterator page) { @@ -77,5 +96,9 @@ public Page getOutput() { } @Override - public void close() {} + public void close() { + while (page.hasNext()) { + page.next().releaseBlocks(); + } + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 7825e035df0db..f06a2780b7446 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; @@ -24,7 +25,7 @@ public class ColumnExtractOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List input = LongStream.range(0, end) .mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l)) .collect(Collectors.toList()); diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 156f37d8d8e7a..91e18214abee2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -21,8 +22,8 @@ public class EvalOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { - return new TupleBlockSourceOperator(LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { + return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } record Addition(int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index b26fe0c33fe1c..9c29471473203 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -23,8 +24,8 @@ public class FilterOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { - return new TupleBlockSourceOperator(LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { + return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } record SameLastDigit(int lhs, int rhs) implements EvalOperator.ExpressionEvaluator { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 9d1084fcc4cf3..d01a5b17ac788 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -56,7 +56,7 @@ protected final Operator.OperatorFactory simple(BigArrays bigArrays) { public final void testInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = 
CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); try ( @@ -80,7 +80,7 @@ public final void testInitialFinal() { public final void testManyInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); try ( @@ -101,7 +101,7 @@ public final void testManyInitialFinal() { public final void testInitialIntermediateFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); try ( @@ -127,7 +127,7 @@ public final void testInitialIntermediateFinal() { public final void testManyInitialManyPartialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); Collections.shuffle(partials, random()); @@ -156,7 +156,7 @@ public final void testManyInitialManyPartialFinal() { // to move the data through the pipeline. public final void testManyInitialManyPartialFinalRunner() { BigArrays bigArrays = nonBreakingBigArrays(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, false /* no throwing ops */); @@ -178,7 +178,7 @@ protected void start(Driver driver, ActionListener listener) { // runner behaves correctly and also releases all resources (bigArrays) appropriately. 
public final void testManyInitialManyPartialFinalRunnerThrowing() { BigArrays bigArrays = nonBreakingBigArrays(); - List input = CannedSourceOperator.collectPages(simpleInput(between(1_000, 100_000))); + List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, true /* one throwing op */); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index 954a1f179f259..1afa5d3c02330 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongGroupingAggregatorFunctionTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -31,9 +32,12 @@ public class HashAggregationOperatorTests extends ForkingOperatorTestCase { @Override - protected SourceOperator simpleInput(int size) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); - return new TupleBlockSourceOperator(LongStream.range(0, size).mapToObj(l -> Tuple.tuple(l % 5, randomLongBetween(-max, max)))); + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(l % 5, randomLongBetween(-max, max))) + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 228fdf262cf62..bbbfd44014ffc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -24,8 +25,8 @@ protected Operator.OperatorFactory simple(BigArrays bigArrays) { } @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size)); + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 80ac57ed539e7..21ca59e0f45a4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -24,7 +25,7 @@ public class MvExpandOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new AbstractBlockSourceOperator(8 * 1024) { private int idx; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 3b2fac5271aa6..aee600b079b81 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArray; @@ -21,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -36,6 +38,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.in; /** * Base tests for {@link Operator}s that are not {@link SourceOperator} or {@link SinkOperator}. @@ -44,7 +47,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase { /** * Valid input to be sent to {@link #simple}; */ - protected abstract SourceOperator simpleInput(int size); + protected abstract SourceOperator simpleInput(BlockFactory blockFactory, int size); /** * Assert that output from {@link #simple} is correct for the @@ -80,15 +83,27 @@ public final void testSimpleLargeInput() { * in a sane way. */ public final void testSimpleCircuitBreaking() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()); + /* + * We build two CircuitBreakers - one for the input blocks and one for the operation itself. + * The input blocks don't count against the memory usage for the limited operator that we + * build. 
+ */ + DriverContext inputFactoryContext = driverContext(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()) + .withCircuitBreaking(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); Exception e = expectThrows( CircuitBreakingException.class, - () -> assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)) + () -> drive(simple(bigArrays).get(new DriverContext(bigArrays, blockFactory)), input.iterator()) ); assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + + // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. + assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** @@ -98,15 +113,24 @@ public final void testSimpleCircuitBreaking() { * in ctors. */ public final void testSimpleWithCranky() { - CrankyCircuitBreakerService breaker = new CrankyCircuitBreakerService(); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breaker).withCircuitBreaking(); - BlockFactory blockFactory = BlockFactory.getInstance(breaker.getBreaker("request"), bigArrays); + DriverContext inputFactoryContext = driverContext(); + List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); + + CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); + BlockFactory blockFactory = BlockFactory.getInstance(cranky.getBreaker(CircuitBreaker.REQUEST), bigArrays); try { - assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)); + List result = drive(simple(bigArrays).get(new DriverContext(bigArrays, blockFactory)), input.iterator()); + Releasables.close(() -> Iterators.map(result.iterator(), p -> p::releaseBlocks)); // Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws } catch (CircuitBreakingException e) { + logger.info("broken", e); assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); } + + // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. + assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); + assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } /** @@ -139,10 +163,12 @@ protected final List oneDriverPerPageList(Iterator> source, Sup } private void assertSimple(DriverContext context, int size) { - List input = CannedSourceOperator.collectPages(simpleInput(size)); + List input = CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), size)); + // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion. 
+ List inputClone = CannedSourceOperator.deepCopyOf(input); BigArrays bigArrays = context.bigArrays().withCircuitBreaking(); List results = drive(simple(bigArrays).get(context), input.iterator()); - assertSimpleOutput(input, results); + assertSimpleOutput(inputClone, results); results.forEach(Page::releaseBlocks); assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); } @@ -180,7 +206,11 @@ public static void runDriver(List drivers) { "dummy-session", new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance()), () -> "dummy-driver", - new SequenceLongBlockSourceOperator(LongStream.range(0, between(1, 100)), between(1, 100)), + new SequenceLongBlockSourceOperator( + BlockFactory.getNonBreakingInstance(), + LongStream.range(0, between(1, 100)), + between(1, 100) + ), List.of(), new PageConsumerOperator(page -> {}), Driver.DEFAULT_STATUS_INTERVAL, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index baa7842bdc1f9..6ec0183bbf224 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -90,7 +90,7 @@ private List randomProjection(int size) { } @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { return new TupleBlockSourceOperator(blockFactory, LongStream.range(0, end).mapToObj(l -> Tuple.tuple(l, end - l))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java index 0aa78f3ad0ab3..f7c3ee825d695 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java @@ -21,23 +21,27 @@ public class SequenceLongBlockSourceOperator extends AbstractBlockSourceOperator static final int DEFAULT_MAX_PAGE_POSITIONS = 8 * 1024; + private final BlockFactory blockFactory; + private final long[] values; - public SequenceLongBlockSourceOperator(LongStream values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, LongStream values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceLongBlockSourceOperator(LongStream values, int maxPagePositions) { + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, LongStream values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values.toArray(); } - public SequenceLongBlockSourceOperator(List values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, List values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public SequenceLongBlockSourceOperator(List values, int maxPagePositions) { + public SequenceLongBlockSourceOperator(BlockFactory blockFactory, List values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = 
values.stream().mapToLong(Long::longValue).toArray(); } @@ -48,7 +52,7 @@ protected Page createPage(int positionOffset, int length) { array[i] = values[positionOffset + i]; } currentPosition += length; - return new Page(BlockFactory.getNonBreakingInstance().newLongArrayVector(array, array.length).asBlock()); // TODO: just for compile + return new Page(blockFactory.newLongArrayVector(array, array.length).asBlock()); // TODO: just for compile } protected int remaining() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index f3c67f18589fa..1e72ecb0c3ee4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; @@ -25,7 +26,7 @@ public class StringExtractOperatorTests extends OperatorTestCase { @Override - protected SourceOperator simpleInput(int end) { + protected SourceOperator simpleInput(BlockFactory blockFactory, int end) { List input = LongStream.range(0, end) .mapToObj(l -> new BytesRef("word1_" + l + " word2_" + l + " word3_" + l)) .collect(Collectors.toList()); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java index 78cff5897c917..9b87dbe01224a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java @@ -26,10 +26,6 @@ public class TupleBlockSourceOperator extends AbstractBlockSourceOperator { private final List> values; - public TupleBlockSourceOperator(Stream> values) { - this(BlockFactory.getNonBreakingInstance(), values, DEFAULT_MAX_PAGE_POSITIONS); - } - public TupleBlockSourceOperator(BlockFactory blockFactory, Stream> values) { this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } @@ -40,14 +36,14 @@ public TupleBlockSourceOperator(BlockFactory blockFactory, Stream> values) { - this(values, DEFAULT_MAX_PAGE_POSITIONS); + public TupleBlockSourceOperator(BlockFactory blockFactory, List> values) { + this(blockFactory, values, DEFAULT_MAX_PAGE_POSITIONS); } - public TupleBlockSourceOperator(List> values, int maxPagePositions) { + public TupleBlockSourceOperator(BlockFactory blockFactory, List> values, int maxPagePositions) { super(maxPagePositions); + this.blockFactory = blockFactory; this.values = values; - blockFactory = BlockFactory.getNonBreakingInstance(); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index d79fde19f5487..9c4358e5d9ee0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.DocVector; @@ -142,7 +143,13 @@ public void testNotInKey() { ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), false, value).writeValue(valuesBuilder, 0); assertThat(valuesBuilder.length(), greaterThan(0)); - ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), false, 1); + ResultBuilder result = ResultBuilder.resultBuilderFor( + BlockFactory.getNonBreakingInstance(), + testCase.type, + testCase.encoder.toUnsortable(), + false, + 1 + ); BytesRef values = valuesBuilder.bytesRefView(); result.decodeValue(values); assertThat(values.length, equalTo(0)); @@ -163,7 +170,13 @@ public void testInKey() { ValueExtractor.extractorFor(testCase.type, testCase.encoder.toUnsortable(), true, value).writeValue(valuesBuilder, 0); assertThat(valuesBuilder.length(), greaterThan(0)); - ResultBuilder result = ResultBuilder.resultBuilderFor(testCase.type, testCase.encoder.toUnsortable(), true, 1); + ResultBuilder result = ResultBuilder.resultBuilderFor( + BlockFactory.getNonBreakingInstance(), + testCase.type, + testCase.encoder.toUnsortable(), + true, + 1 + ); BytesRef keys = keysBuilder.bytesRefView(); if (testCase.type == ElementType.NULL) { assertThat(keys.length, equalTo(1)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 7491ffde6766e..95f6613d3c0a4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -37,16 +38,21 @@ import org.elasticsearch.compute.operator.TupleBlockSourceOperator; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.versionfield.Version; +import org.junit.After; +import java.lang.reflect.Field; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; @@ -143,8 +149,12 @@ protected String expectedToStringOfSimple() { } @Override - protected SourceOperator simpleInput(int size) { - return new SequenceLongBlockSourceOperator(LongStream.range(0, size).map(l -> ESTestCase.randomLong()), between(1, size * 2)); + 
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator( + blockFactory, + LongStream.range(0, size).map(l -> ESTestCase.randomLong()), + between(1, size * 2) + ); } @Override @@ -180,26 +190,48 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testRamBytesUsed() { + RamUsageTester.Accumulator acc = new RamUsageTester.Accumulator() { + @Override + public long accumulateObject(Object o, long shallowSize, Map<Field, Object> fieldValues, Collection<Object> queue) { + if (o instanceof ElementType) { + return 0; // shared + } + if (o instanceof TopNEncoder) { + return 0; // shared + } + if (o instanceof CircuitBreaker) { + return 0; // shared + } + if (o instanceof BlockFactory) { + return 0; // shared + } + return super.accumulateObject(o, shallowSize, fieldValues, queue); + } + }; int topCount = 10_000; // We under-count by a few bytes because of the lists. In that end that's fine, but we need to account for it here. - long underCount = 100; - TopNOperator op = new TopNOperator.TopNOperatorFactory( - topCount, - List.of(LONG), - List.of(DEFAULT_UNSORTABLE), - List.of(new TopNOperator.SortOrder(0, true, false)), - pageSize - ).get(driverContext()); - long actualEmpty = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE) - - RamUsageTester.ramUsed(op.breaker()); - assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); - // But when we fill it then we're quite close - for (Page p : CannedSourceOperator.collectPages(simpleInput(topCount))) { - op.addInput(p); + long underCount = 200; + DriverContext context = driverContext(); + try ( + TopNOperator op = new TopNOperator.TopNOperatorFactory( + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, false)), + pageSize + ).get(context) + ) { + long actualEmpty = RamUsageTester.ramUsed(op, acc); + assertThat(op.ramBytesUsed(), both(greaterThan(actualEmpty - underCount)).and(lessThan(actualEmpty))); + // But when we fill it then we're quite close + for (Page p : CannedSourceOperator.collectPages(simpleInput(context.blockFactory(), topCount))) { + op.addInput(p); + } + long actualFull = RamUsageTester.ramUsed(op, acc); + assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); + + // TODO empty it again and check.
} - long actualFull = RamUsageTester.ramUsed(op) - RamUsageTester.ramUsed(LONG) - RamUsageTester.ramUsed(DEFAULT_UNSORTABLE) - - RamUsageTester.ramUsed(op.breaker()); - assertThat(op.ramBytesUsed(), both(greaterThan(actualFull - underCount)).and(lessThan(actualFull))); } public void testRandomTopN() { @@ -471,6 +503,7 @@ public void testCollectAllValues() { new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + blockFactory, nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -559,6 +592,7 @@ public void testCollectAllValues_RandomMultiValues() { new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + blockFactory, nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -590,9 +624,10 @@ private List> topNTwoColumns( try ( Driver driver = new Driver( driverContext, - new TupleBlockSourceOperator(inputValues, randomIntBetween(1, 1000)), + new TupleBlockSourceOperator(driverContext.blockFactory(), inputValues, randomIntBetween(1, 1000)), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), limit, elementTypes, @@ -607,6 +642,7 @@ private List> topNTwoColumns( for (int i = 0; i < block1.getPositionCount(); i++) { outputValues.add(tuple(block1.isNull(i) ? null : block1.getLong(i), block2.isNull(i) ? null : block2.getLong(i))); } + page.releaseBlocks(); }), () -> {} ) @@ -848,6 +884,7 @@ private void assertSortingOnMV( TopNEncoder encoder, TopNOperator.SortOrder... sortOrders ) { + DriverContext driverContext = driverContext(); Block block = TestBlockBuilder.blockFromValues(values, blockType); assert block.mvOrdering() == Block.MvOrdering.UNORDERED : "Blocks created for this test must have unordered multi-values"; Page page = new Page(block); @@ -856,10 +893,11 @@ private void assertSortingOnMV( int topCount = randomIntBetween(1, values.size()); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(page).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, List.of(blockType), @@ -878,6 +916,7 @@ private void assertSortingOnMV( } public void testRandomMultiValuesTopN() { + DriverContext driverContext = driverContext(); int rows = randomIntBetween(50, 100); int topCount = randomIntBetween(1, rows); int blocksCount = randomIntBetween(20, 30); @@ -969,8 +1008,9 @@ public void testRandomMultiValuesTopN() { } List>> actualValues = new ArrayList<>(); - List results = this.drive( + List results = drive( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), topCount, elementTypes, @@ -982,6 +1022,7 @@ public void testRandomMultiValuesTopN() { ); for (Page p : results) { readAsRows(actualValues, p); + p.releaseBlocks(); } List>> topNExpectedValues = expectedValues.stream() @@ -1003,13 +1044,15 @@ public void testIPSortingSingleValue() throws UnknownHostException { append(builder, new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip)))); } + DriverContext driverContext = driverContext(); List> actual = new ArrayList<>(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), 
nonBreakingBigArrays().breakerService().getBreaker("request"), ips.size(), List.of(BYTES_REF), @@ -1128,12 +1171,14 @@ private void assertIPSortingOnMultiValues( } List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(builder.build())).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), ips.size(), List.of(BYTES_REF), @@ -1210,12 +1255,14 @@ public void testZeroByte() { blocks.add(builderInt.build()); List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( - driverContext(), + driverContext, new CannedSourceOperator(List.of(new Page(blocks.toArray(Block[]::new))).iterator()), List.of( new TopNOperator( + driverContext.blockFactory(), nonBreakingBigArrays().breakerService().getBreaker("request"), 2, List.of(BYTES_REF, INT), @@ -1243,10 +1290,55 @@ public void testZeroByte() { assertThat((Integer) actual.get(1).get(1), equalTo(100)); } + public void testErrorBeforeFullyDraining() { + int maxPageSize = between(1, 100); + int topCount = maxPageSize * 4; + int docCount = topCount * 10; + List> actual = new ArrayList<>(); + DriverContext driverContext = driverContext(); + try ( + Driver driver = new Driver( + driverContext, + new SequenceLongBlockSourceOperator(driverContext.blockFactory(), LongStream.range(0, docCount)), + List.of( + new TopNOperator( + driverContext.blockFactory(), + nonBreakingBigArrays().breakerService().getBreaker("request"), + topCount, + List.of(LONG), + List.of(DEFAULT_UNSORTABLE), + List.of(new TopNOperator.SortOrder(0, true, randomBoolean())), + maxPageSize + ) + ), + new PageConsumerOperator(p -> { + assertThat(p.getPositionCount(), equalTo(maxPageSize)); + if (actual.isEmpty()) { + readInto(actual, p); + } else { + p.releaseBlocks(); + throw new RuntimeException("boo"); + } + }), + () -> {} + ) + ) { + Exception e = expectThrows(RuntimeException.class, () -> runDriver(driver)); + assertThat(e.getMessage(), equalTo("boo")); + } + + ListMatcher values = matchesList(); + for (int i = 0; i < maxPageSize; i++) { + values = values.item((long) i); + } + assertMap(actual, matchesList().item(values)); + } + public void testCloseWithoutCompleting() { CircuitBreaker breaker = new MockBigArrays.LimitedBreaker(CircuitBreaker.REQUEST, ByteSizeValue.ofGb(1)); try ( TopNOperator op = new TopNOperator( + driverContext().blockFactory(), breaker, 2, List.of(INT), @@ -1257,7 +1349,23 @@ public void testCloseWithoutCompleting() { ) { op.addInput(new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock())); } - assertThat(breaker.getUsed(), equalTo(0L)); + } + + private final List breakers = new ArrayList<>(); + + @Override + protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + } + + @After + public void allBreakersEmpty() { + for (CircuitBreaker breaker : breakers) { + assertThat(breaker.getUsed(), equalTo(0L)); + } } @SuppressWarnings({ "unchecked", "rawtypes" }) diff --git 
a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml index 280a32aa10cd3..bae0e623d12a3 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/50_index_patterns.yml @@ -235,35 +235,36 @@ disjoint_mappings: - length: { values: 1 } - match: { values.0.0: 2 } - - do: - esql.query: - body: - query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' - - match: { columns.0.name: message1 } - - match: { columns.0.type: keyword } - - match: { columns.1.name: message2 } - - match: { columns.1.type: long } - - match: { columns.2.name: x } - - match: { columns.2.type: keyword } - - match: { columns.3.name: y } - - match: { columns.3.type: long } - - length: { values: 4 } - - match: { values.0.0: foo1 } - - match: { values.0.1: null } - - match: { values.0.2: foo1 } - - match: { values.0.3: null } - - match: { values.1.0: foo2 } - - match: { values.1.1: null } - - match: { values.1.2: foo2 } - - match: { values.1.3: null } - - match: { values.2.0: null } - - match: { values.2.1: 1 } - - match: { values.2.2: null } - - match: { values.2.3: 2 } - - match: { values.3.0: null } - - match: { values.3.1: 2 } - - match: { values.3.2: null } - - match: { values.3.3: 3 } +# AwaitsFix https://github.com/elastic/elasticsearch/issues/99826 +# - do: +# esql.query: +# body: +# query: 'from test1,test2 | sort message1, message2 | eval x = message1, y = message2 + 1 | keep message1, message2, x, y' +# - match: { columns.0.name: message1 } +# - match: { columns.0.type: keyword } +# - match: { columns.1.name: message2 } +# - match: { columns.1.type: long } +# - match: { columns.2.name: x } +# - match: { columns.2.type: keyword } +# - match: { columns.3.name: y } +# - match: { columns.3.type: long } +# - length: { values: 4 } +# - match: { values.0.0: foo1 } +# - match: { values.0.1: null } +# - match: { values.0.2: foo1 } +# - match: { values.0.3: null } +# - match: { values.1.0: foo2 } +# - match: { values.1.1: null } +# - match: { values.1.2: foo2 } +# - match: { values.1.3: null } +# - match: { values.2.0: null } +# - match: { values.2.1: 1 } +# - match: { values.2.2: null } +# - match: { values.2.3: 2 } +# - match: { values.3.0: null } +# - match: { values.3.1: 2 } +# - match: { values.3.2: null } +# - match: { values.3.3: 3 } --- same_name_different_type: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec index d89f3337c081b..82fd27416c526 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec @@ -7,7 +7,8 @@ emp_no:integer |_index:keyword |_version:long 10002 |employees |1 ; -aliasWithSameName +# AwaitsFix https://github.com/elastic/elasticsearch/issues/99826 +aliasWithSameName-Ignore from employees [metadata _index, _version] | sort emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long diff --git 
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
index f8aeee1569f2e..800e36949f5ea 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
@@ -9,7 +9,6 @@
 
 import org.elasticsearch.Build;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
@@ -109,32 +108,33 @@ public void testFromStatsGroupingAvgWithAliases() {
     }
 
     private void testFromStatsGroupingAvgImpl(String command, String expectedGroupName, String expectedFieldName) {
-        EsqlQueryResponse results = run(command);
-        logger.info(results);
-        Assert.assertEquals(2, results.columns().size());
-
-        // assert column metadata
-        ColumnInfo valuesColumn = results.columns().get(0);
-        assertEquals(expectedFieldName, valuesColumn.name());
-        assertEquals("double", valuesColumn.type());
-        ColumnInfo groupColumn = results.columns().get(1);
-        assertEquals(expectedGroupName, groupColumn.name());
-        assertEquals("long", groupColumn.type());
+        try (EsqlQueryResponse results = run(command)) {
+            logger.info(results);
+            Assert.assertEquals(2, results.columns().size());
 
-        // assert column values
-        List<List<Object>> valueValues = getValuesList(results);
-        assertEquals(2, valueValues.size());
-        // This is loathsome, find a declarative way to assert the expected output.
-        if ((long) valueValues.get(0).get(1) == 1L) {
-            assertEquals(42.0, (double) valueValues.get(0).get(0), 0.0);
-            assertEquals(2L, (long) valueValues.get(1).get(1));
-            assertEquals(44.0, (double) valueValues.get(1).get(0), 0.0);
-        } else if ((long) valueValues.get(0).get(1) == 2L) {
-            assertEquals(42.0, (double) valueValues.get(1).get(0), 0.0);
-            assertEquals(1L, (long) valueValues.get(1).get(1));
-            assertEquals(44.0, (double) valueValues.get(0).get(0), 0.0);
-        } else {
-            fail("Unexpected group value: " + valueValues.get(0).get(0));
+            // assert column metadata
+            ColumnInfo valuesColumn = results.columns().get(0);
+            assertEquals(expectedFieldName, valuesColumn.name());
+            assertEquals("double", valuesColumn.type());
+            ColumnInfo groupColumn = results.columns().get(1);
+            assertEquals(expectedGroupName, groupColumn.name());
+            assertEquals("long", groupColumn.type());
+
+            // assert column values
+            List<List<Object>> valueValues = getValuesList(results);
+            assertEquals(2, valueValues.size());
+            // This is loathsome, find a declarative way to assert the expected output.
+            if ((long) valueValues.get(0).get(1) == 1L) {
+                assertEquals(42.0, (double) valueValues.get(0).get(0), 0.0);
+                assertEquals(2L, (long) valueValues.get(1).get(1));
+                assertEquals(44.0, (double) valueValues.get(1).get(0), 0.0);
+            } else if ((long) valueValues.get(0).get(1) == 2L) {
+                assertEquals(42.0, (double) valueValues.get(1).get(0), 0.0);
+                assertEquals(1L, (long) valueValues.get(1).get(1));
+                assertEquals(44.0, (double) valueValues.get(0).get(0), 0.0);
+            } else {
+                fail("Unexpected group value: " + valueValues.get(0).get(0));
+            }
         }
     }
 
@@ -211,19 +211,20 @@ public void testFromGroupingByNumericFieldWithNulls() {
             }
         }
         client().admin().indices().prepareRefresh("test").get();
-        EsqlQueryResponse results = run("from test | stats avg(count) by data | sort data");
-        logger.info(results);
+        try (EsqlQueryResponse results = run("from test | stats avg(count) by data | sort data")) {
+            logger.info(results);
 
-        assertThat(results.columns(), hasSize(2));
-        assertEquals("avg(count)", results.columns().get(0).name());
-        assertEquals("double", results.columns().get(0).type());
-        assertEquals("data", results.columns().get(1).name());
-        assertEquals("long", results.columns().get(1).type());
+            assertThat(results.columns(), hasSize(2));
+            assertEquals("avg(count)", results.columns().get(0).name());
+            assertEquals("double", results.columns().get(0).type());
+            assertEquals("data", results.columns().get(1).name());
+            assertEquals("long", results.columns().get(1).type());
 
-        record Group(Long data, Double avg) {}
-        List<Group> expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0));
-        List<Group> actualGroups = getValuesList(results).stream().map(l -> new Group((Long) l.get(1), (Double) l.get(0))).toList();
-        assertThat(actualGroups, equalTo(expectedGroups));
+            record Group(Long data, Double avg) {}
+            List<Group> expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0));
+            List<Group> actualGroups = getValuesList(results).stream().map(l -> new Group((Long) l.get(1), (Double) l.get(0))).toList();
+            assertThat(actualGroups, equalTo(expectedGroups));
+        }
     }
 
     public void testFromStatsGroupingByKeyword() {
@@ -332,18 +333,19 @@ record Group(double avg, long mi, long ma, long s, long c, String color) {}
     }
 
     public void testFromSortWithTieBreakerLimit() {
-        EsqlQueryResponse results = run("from test | sort data, count desc, time | limit 5 | keep data, count, time");
-        logger.info(results);
-        assertThat(
-            getValuesList(results),
-            contains(
-                List.of(1L, 44L, epoch + 2),
-                List.of(1L, 44L, epoch + 6),
-                List.of(1L, 44L, epoch + 10),
-                List.of(1L, 44L, epoch + 14),
-                List.of(1L, 44L, epoch + 18)
-            )
-        );
+        try (EsqlQueryResponse results = run("from test | sort data, count desc, time | limit 5 | keep data, count, time")) {
+            logger.info(results);
+            assertThat(
+                getValuesList(results),
+                contains(
+                    List.of(1L, 44L, epoch + 2),
+                    List.of(1L, 44L, epoch + 6),
+                    List.of(1L, 44L, epoch + 10),
+                    List.of(1L, 44L, epoch + 14),
+                    List.of(1L, 44L, epoch + 18)
+                )
+            );
+        }
     }
 
     public void testFromStatsProjectGroup() {
@@ -778,10 +780,11 @@ public void testFromStatsLimit() {
     }
 
     public void testFromLimit() {
-        EsqlQueryResponse results = run("from test | keep data | limit 2");
-        logger.info(results);
-        assertThat(results.columns(), contains(new ColumnInfo("data", "long")));
-        assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L))));
+        try (EsqlQueryResponse results = run("from test | keep data | limit 2")) {
limit 2")) { + logger.info(results); + assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); + assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L)))); + } } public void testDropAllColumns() { @@ -1000,27 +1003,25 @@ public void testTopNPushedToLuceneOnSortedIndex() { ); int limit = randomIntBetween(1, 5); - EsqlQueryResponse results = run("from sorted_test_index | sort time " + sortOrder + " | limit " + limit + " | keep time"); - logger.info(results); - Assert.assertEquals(1, results.columns().size()); - Assert.assertEquals(limit, getValuesList(results).size()); - - // assert column metadata - assertEquals("time", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + try (EsqlQueryResponse results = run("from sorted_test_index | sort time " + sortOrder + " | limit " + limit + " | keep time")) { + logger.info(results); + Assert.assertEquals(1, results.columns().size()); + Assert.assertEquals(limit, getValuesList(results).size()); - boolean sortedDesc = "desc".equals(sortOrder); - var expected = LongStream.range(0, 40) - .map(i -> epoch + i) - .boxed() - .sorted(sortedDesc ? reverseOrder() : naturalOrder()) - .limit(limit) - .toList(); - var actual = getValuesList(results).stream().map(l -> (Long) l.get(0)).toList(); - assertThat(actual, equalTo(expected)); + // assert column metadata + assertEquals("time", results.columns().get(0).name()); + assertEquals("long", results.columns().get(0).type()); - // clean-up - client().admin().indices().delete(new DeleteIndexRequest("sorted_test_index")).actionGet(); + boolean sortedDesc = "desc".equals(sortOrder); + var expected = LongStream.range(0, 40) + .map(i -> epoch + i) + .boxed() + .sorted(sortedDesc ? 
reverseOrder() : naturalOrder()) + .limit(limit) + .toList(); + var actual = getValuesList(results).stream().map(l -> (Long) l.get(0)).toList(); + assertThat(actual, equalTo(expected)); + } } /* diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java index 19893bf8072f1..2636a7cf3c133 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; @@ -18,6 +19,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.TransportSettings; @@ -29,7 +31,9 @@ import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +@TestLogging(value = "org.elasticsearch.indices.breaker:TRACE", reason = "failing") @ESIntegTestCase.ClusterScope(scope = TEST, minNumDataNodes = 2, maxNumDataNodes = 4) +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99173") public class EsqlDisruptionIT extends EsqlActionIT { // copied from AbstractDisruptionTestCase