From 6de5edd70d4e322d3c86b5c0fb4fa256fc17aa0f Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 23 Nov 2021 09:11:11 +0100 Subject: [PATCH 1/7] Fix several potential circuit breaker leaks in Aggregators (#79676) This commit adds a new CircuitBreaker implementation in the test that throws CircuitBreaker Exceptions randomly. This new circuit breaker helps uncover several places where we might leak if the circuit breaker throws such exception. --- .../bucket/BucketsAggregator.java | 9 +- .../bucket/composite/BinaryValuesSource.java | 10 +- .../bucket/composite/CompositeAggregator.java | 4 +- .../bucket/composite/DoubleValuesSource.java | 10 +- .../bucket/composite/LongValuesSource.java | 10 +- .../AutoDateHistogramAggregator.java | 18 ++- .../VariableWidthHistogramAggregator.java | 11 +- .../bucket/terms/BytesKeyedBucketOrds.java | 10 +- .../GlobalOrdinalsStringTermsAggregator.java | 11 +- .../terms/MapStringTermsAggregator.java | 14 +- .../bucket/terms/NumericTermsAggregator.java | 10 +- .../bucket/terms/SignificanceLookup.java | 92 ++++++++----- .../SignificantTextAggregatorFactory.java | 50 ++++--- .../search/sort/BucketedSort.java | 33 ++++- .../aggregations/AggregatorTestCase.java | 126 +++++++++++++++++- .../topmetrics/TopMetricsAggregator.java | 4 +- .../TopMetricsAggregatorFactory.java | 44 ++++-- .../analytics/ttest/TTestStatsBuilder.java | 16 ++- 18 files changed, 391 insertions(+), 91 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 8be166c0fe4a5..449c46078d37b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -99,8 +99,11 @@ public final void collectExistingBucket(LeafBucketCollector subCollector, int do * If a bucket's ordinal is mapped to -1 then the bucket is removed entirely. 
*/ public final void rewriteBuckets(long newNumBuckets, LongUnaryOperator mergeMap) { - try (LongArray oldDocCounts = docCounts) { + LongArray oldDocCounts = docCounts; + boolean success = false; + try { docCounts = bigArrays().newLongArray(newNumBuckets, true); + success = true; docCounts.fill(0, newNumBuckets, 0); for (long i = 0; i < oldDocCounts.size(); i++) { long docCount = oldDocCounts.get(i); @@ -113,6 +116,10 @@ public final void rewriteBuckets(long newNumBuckets, LongUnaryOperator mergeMap) docCounts.increment(destinationOrdinal, docCount); } } + } finally { + if (success) { + oldDocCounts.close(); + } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java index 8d8cf61353ebf..8adb8c9b364fc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/BinaryValuesSource.java @@ -52,7 +52,15 @@ class BinaryValuesSource extends SingleDimensionValuesSource { this.breakerConsumer = breakerConsumer; this.docValuesFunc = docValuesFunc; this.values = bigArrays.newObjectArray(Math.min(size, 100)); - this.valueBuilders = bigArrays.newObjectArray(Math.min(size, 100)); + boolean success = false; + try { + this.valueBuilders = bigArrays.newObjectArray(Math.min(size, 100)); + success = true; + } finally { + if (success == false) { + close(); + } + } } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index da4f3669c09eb..63bc10a7e9e29 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -149,7 +149,9 @@ protected void doClose() { try { Releasables.close(queue); } finally { - Releasables.close(sources); + if (sources != null) { + Releasables.close(sources); + } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java index bf75097beca9a..e1b15f0db93ea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DoubleValuesSource.java @@ -46,7 +46,15 @@ class DoubleValuesSource extends SingleDimensionValuesSource { super(bigArrays, format, fieldType, missingBucket, missingOrder, size, reverseMul); this.docValuesFunc = docValuesFunc; this.bits = this.missingBucket ? 
new BitArray(100, bigArrays) : null; - this.values = bigArrays.newDoubleArray(Math.min(size, 100), false); + boolean success = false; + try { + this.values = bigArrays.newDoubleArray(Math.min(size, 100), false); + success = true; + } finally { + if (success == false) { + close(); + } + } } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java index 5bc9ee0906fe8..821958f10b3ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/LongValuesSource.java @@ -64,7 +64,15 @@ class LongValuesSource extends SingleDimensionValuesSource { this.docValuesFunc = docValuesFunc; this.rounding = rounding; this.bits = missingBucket ? new BitArray(Math.min(size, 100), bigArrays) : null; - this.values = bigArrays.newLongArray(Math.min(size, 100), false); + boolean success = false; + try { + this.values = bigArrays.newLongArray(Math.min(size, 100), false); + success = true; + } finally { + if (success == false) { + close(); + } + } } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index ebfc2dcbbfa1b..748f05aef67e7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -276,10 +276,13 @@ private void increaseRoundingIfNeeded(long rounded) { return; } do { - try (LongKeyedBucketOrds oldOrds = bucketOrds) { + LongKeyedBucketOrds oldOrds = bucketOrds; + boolean success = false; + try { preparedRounding = prepareRounding(++roundingIdx); long[] mergeMap = new long[Math.toIntExact(oldOrds.size())]; bucketOrds = new LongKeyedBucketOrds.FromSingle(bigArrays()); + success = true; // now it is safe to close oldOrds after we finish LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = oldOrds.ordsEnum(0); while (ordsEnum.next()) { long oldKey = ordsEnum.value(); @@ -288,6 +291,10 @@ private void increaseRoundingIfNeeded(long rounded) { mergeMap[(int) ordsEnum.ord()] = newBucketOrd >= 0 ? 
newBucketOrd : -1 - newBucketOrd; } merge(mergeMap, bucketOrds.size()); + } finally { + if (success) { + oldOrds.close(); + } } } while (roundingIdx < roundingInfos.length - 1 && (bucketOrds.size() > targetBuckets * roundingInfos[roundingIdx].getMaximumInnerInterval() @@ -527,9 +534,12 @@ private int increaseRoundingIfNeeded(long owningBucketOrd, int oldEstimatedBucke private void rebucket() { rebucketCount++; - try (LongKeyedBucketOrds oldOrds = bucketOrds) { + LongKeyedBucketOrds oldOrds = bucketOrds; + boolean success = false; + try { long[] mergeMap = new long[Math.toIntExact(oldOrds.size())]; bucketOrds = new LongKeyedBucketOrds.FromMany(bigArrays()); + success = true; for (long owningBucketOrd = 0; owningBucketOrd <= oldOrds.maxOwningBucketOrd(); owningBucketOrd++) { LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = oldOrds.ordsEnum(owningBucketOrd); Rounding.Prepared preparedRounding = preparedRoundings[roundingIndexFor(owningBucketOrd)]; @@ -543,6 +553,10 @@ private void rebucket() { liveBucketCountUnderestimate.set(owningBucketOrd, Math.toIntExact(bucketOrds.bucketsInOrd(owningBucketOrd))); } merge(mergeMap, bucketOrds.size()); + } finally { + if (success) { + oldOrds.close(); + } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java index 6802fcd8ea2bc..299c67fec4995 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java @@ -159,7 +159,16 @@ private class MergeBucketsPhase extends CollectionPhase { MergeBucketsPhase(DoubleArray buffer, int bufferSize) { // Cluster the documents to reduce the number of buckets - bucketBufferedDocs(buffer, bufferSize, mergePhaseInitialBucketCount(shardSize)); + boolean success = false; + try { + bucketBufferedDocs(buffer, bufferSize, mergePhaseInitialBucketCount(shardSize)); + success = true; + } finally { + if (success == false) { + close(); + clusterMaxes = clusterMins = clusterCentroids = clusterSizes = null; + } + } if (bufferSize > 1) { updateAvgBucketDistance(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java index 680e45326e0f5..e924b93a52407 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/BytesKeyedBucketOrds.java @@ -159,7 +159,15 @@ private static class FromMany extends BytesKeyedBucketOrds { private FromMany(BigArrays bigArrays) { bytesToLong = new BytesRefHash(1, bigArrays); - longToBucketOrds = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.MANY); + boolean success = false; + try { + longToBucketOrds = LongKeyedBucketOrds.build(bigArrays, CardinalityUpperBound.MANY); + success = true; + } finally { + if (success == false) { + close(); + } + } } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index cdba34a831ad2..1bc81b03ff543 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -812,7 +812,7 @@ class SignificantTermsResults extends ResultStrategy< private final long supersetSize; private final SignificanceHeuristic significanceHeuristic; - private LongArray subsetSizes = bigArrays().newLongArray(1, true); + private LongArray subsetSizes; SignificantTermsResults( SignificanceLookup significanceLookup, @@ -822,6 +822,15 @@ class SignificantTermsResults extends ResultStrategy< backgroundFrequencies = significanceLookup.bytesLookup(bigArrays(), cardinality); supersetSize = significanceLookup.supersetSize(); this.significanceHeuristic = significanceHeuristic; + boolean success = false; + try { + subsetSizes = bigArrays().newLongArray(1, true); + success = true; + } finally { + if (success == false) { + close(); + } + } } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index 8ba01b3fae754..dbcdde4ed1e11 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -69,10 +69,11 @@ public MapStringTermsAggregator( Map metadata ) throws IOException { super(name, factories, context, parent, order, format, bucketCountThresholds, collectionMode, showTermDocCountError, metadata); - this.collectorSource = collectorSource; this.resultStrategy = resultStrategy.apply(this); // ResultStrategy needs a reference to the Aggregator to do its job. this.includeExclude = includeExclude; bucketOrds = BytesKeyedBucketOrds.build(context.bigArrays(), cardinality); + // set last because if there is an error during construction the collector gets release outside the constructor. + this.collectorSource = collectorSource; } @Override @@ -478,7 +479,7 @@ class SignificantTermsResults extends ResultStrategy a.new SignificantTermsResults(lookup, significanceHeuristic, cardinality), - null, - DocValueFormat.RAW, - bucketCountThresholds, - incExcFilter, - context, - parent, - SubAggCollectionMode.BREADTH_FIRST, - false, - cardinality, - metadata - ); + final IncludeExclude.StringFilter incExcFilter = includeExclude == null + ? 
null + : includeExclude.convertToStringFilter(DocValueFormat.RAW); + + final SignificanceLookup lookup = new SignificanceLookup(context, fieldType, DocValueFormat.RAW, backgroundFilter); + final CollectorSource collectorSource = createCollectorSource(); + boolean success = false; + try { + final MapStringTermsAggregator mapStringTermsAggregator = new MapStringTermsAggregator( + name, + factories, + collectorSource, + a -> a.new SignificantTermsResults(lookup, significanceHeuristic, cardinality), + null, + DocValueFormat.RAW, + bucketCountThresholds, + incExcFilter, + context, + parent, + SubAggCollectionMode.BREADTH_FIRST, + false, + cardinality, + metadata + ); + success = true; + return mapStringTermsAggregator; + } finally { + if (success == false) { + Releasables.close(collectorSource); + } + } } /** diff --git a/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java b/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java index ad0e704665cd5..f3f15111f8e9b 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java +++ b/server/src/main/java/org/elasticsearch/search/sort/BucketedSort.java @@ -453,10 +453,19 @@ private ExtraData.Loader loader() throws IOException { * Superclass for implementations of {@linkplain BucketedSort} for {@code double} keys. */ public abstract static class ForDoubles extends BucketedSort { - private DoubleArray values = bigArrays.newDoubleArray(getBucketSize(), false); + private DoubleArray values; public ForDoubles(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format, int bucketSize, ExtraData extra) { super(bigArrays, sortOrder, format, bucketSize, extra); + boolean success = false; + try { + values = bigArrays.newDoubleArray(getBucketSize(), false); + success = true; + } finally { + if (success == false) { + close(); + } + } initGatherOffsets(); } @@ -544,7 +553,7 @@ public abstract static class ForFloats extends BucketedSort { */ public static final int MAX_BUCKET_SIZE = (int) Math.pow(2, 24); - private FloatArray values = bigArrays.newFloatArray(1, false); + private FloatArray values; public ForFloats(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format, int bucketSize, ExtraData extra) { super(bigArrays, sortOrder, format, bucketSize, extra); @@ -552,6 +561,15 @@ public ForFloats(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format close(); throw new IllegalArgumentException("bucket size must be less than [2^24] but was [" + bucketSize + "]"); } + boolean success = false; + try { + values = bigArrays.newFloatArray(1, false); + success = true; + } finally { + if (success == false) { + close(); + } + } initGatherOffsets(); } @@ -626,10 +644,19 @@ protected final boolean docBetterThan(long index) { * Superclass for implementations of {@linkplain BucketedSort} for {@code long} keys. 
*/ public abstract static class ForLongs extends BucketedSort { - private LongArray values = bigArrays.newLongArray(1, false); + private LongArray values; public ForLongs(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format, int bucketSize, ExtraData extra) { super(bigArrays, sortOrder, format, bucketSize, extra); + boolean success = false; + try { + values = bigArrays.newLongArray(1, false); + success = true; + } finally { + if (success == false) { + close(); + } + } initGatherOffsets(); }
diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 533e56e6936ef..41fc2ff803634 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -44,6 +44,7 @@ import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput;
@@ -94,7 +95,9 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.indices.breaker.AllCircuitBreakerStats; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.CircuitBreakerStats; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin;
@@ -471,11 +474,13 @@ protected A searchAndReduc * Collects all documents that match the provided query {@link Query} and * returns the reduced {@link InternalAggregation}. * + * It also runs the aggregation using a circuit breaker that randomly throws {@link CircuitBreakingException} + * in order to make sure the implementation does not leak. + * * @param splitLeavesIntoSeparateAggregators If true this creates a new {@link Aggregator} * for each leaf as though it were a separate index. If false this aggregates * all leaves together, like we do in production. */ - @SuppressWarnings("unchecked") protected A searchAndReduc( IndexSettings indexSettings, IndexSearcher searcher, @@ -484,12 +489,57 @@ protected A searchAndReduc int maxBucket, boolean splitLeavesIntoSeparateAggregators, MappedFieldType...
fieldTypes + ) throws IOException { + // First run it to find circuit breaker leaks on the aggregator + CircuitBreakerService crankyService = new CrankyCircuitBreakerService(); + for (int i = 0; i < 5; i++) { + try { + searchAndReduce( + indexSettings, + searcher, + query, + builder, + maxBucket, + splitLeavesIntoSeparateAggregators, + crankyService, + fieldTypes + ); + } catch (CircuitBreakingException e) { + // expected + } catch (IOException e) { + throw e; + } + } + // Second run it to the end + CircuitBreakerService breakerService = new NoneCircuitBreakerService(); + return searchAndReduce( + indexSettings, + searcher, + query, + builder, + maxBucket, + splitLeavesIntoSeparateAggregators, + breakerService, + fieldTypes + ); + } + + @SuppressWarnings("unchecked") + private A searchAndReduce( + IndexSettings indexSettings, + IndexSearcher searcher, + Query query, + AggregationBuilder builder, + int maxBucket, + boolean splitLeavesIntoSeparateAggregators, + CircuitBreakerService breakerService, + MappedFieldType... fieldTypes ) throws IOException { final IndexReaderContext ctx = searcher.getTopReaderContext(); final PipelineTree pipelines = builder.buildPipelineTree(); List aggs = new ArrayList<>(); Query rewritten = searcher.rewrite(query); - CircuitBreakerService breakerService = new NoneCircuitBreakerService(); + AggregationContext context = createAggregationContext( searcher, indexSettings, @@ -1313,4 +1363,76 @@ public List getAggregations() { ); } } + + private static class CrankyCircuitBreakerService extends CircuitBreakerService { + + private final CircuitBreaker breaker = new CircuitBreaker() { + @Override + public void circuitBreak(String fieldName, long bytesNeeded) { + + } + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + if (random().nextInt(20) == 0) { + throw new CircuitBreakingException("fake error", Durability.PERMANENT); + } + } + + @Override + public void addWithoutBreaking(long bytes) { + + } + + @Override + public long getUsed() { + return 0; + } + + @Override + public long getLimit() { + return 0; + } + + @Override + public double getOverhead() { + return 0; + } + + @Override + public long getTrippedCount() { + return 0; + } + + @Override + public String getName() { + return CircuitBreaker.FIELDDATA; + } + + @Override + public Durability getDurability() { + return null; + } + + @Override + public void setLimitAndOverhead(long limit, double overhead) { + + } + }; + + @Override + public CircuitBreaker getBreaker(String name) { + return breaker; + } + + @Override + public AllCircuitBreakerStats stats() { + return new AllCircuitBreakerStats(new CircuitBreakerStats[] { stats(CircuitBreaker.FIELDDATA) }); + } + + @Override + public CircuitBreakerStats stats(String name) { + return new CircuitBreakerStats(CircuitBreaker.FIELDDATA, -1, -1, 0, 0); + } + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java index 575be34199aba..7a3d8a497ea18 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregator.java @@ -76,7 +76,8 @@ class TopMetricsAggregator extends NumericMetricsAggregator.MultiValue { ) throws IOException { super(name, context, parent, metadata); 
this.size = size; - this.metrics = new TopMetricsAggregator.Metrics(metricValues); + // In case of failure we are releasing this objects outside therefore we need to set it at the end. + TopMetricsAggregator.Metrics metrics = new TopMetricsAggregator.Metrics(metricValues); /* * If we're only collecting a single value then only provided *that* * value to the sort so that swaps and loads are just a little faster @@ -84,6 +85,7 @@ class TopMetricsAggregator extends NumericMetricsAggregator.MultiValue { */ BucketedSort.ExtraData values = metrics.values.length == 1 ? metrics.values[0] : metrics; this.sort = context.buildBucketedSort(sort, size, values); + this.metrics = metrics; } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java index e79dd650219ec..199031f3f4e72 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorFactory.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -77,21 +78,38 @@ protected TopMetricsAggregator createInternal(Aggregator parent, CardinalityUppe ); } MetricValues[] metricValues = new MetricValues[metricFields.size()]; - for (int i = 0; i < metricFields.size(); i++) { - MultiValuesSourceFieldConfig config = metricFields.get(i); - ValuesSourceConfig vsConfig = ValuesSourceConfig.resolve( + boolean success = false; + try { + for (int i = 0; i < metricFields.size(); i++) { + MultiValuesSourceFieldConfig config = metricFields.get(i); + ValuesSourceConfig vsConfig = ValuesSourceConfig.resolve( + context, + null, + config.getFieldName(), + config.getScript(), + config.getMissing(), + config.getTimeZone(), + null, + CoreValuesSourceType.NUMERIC + ); + MetricValuesSupplier supplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, vsConfig); + metricValues[i] = supplier.build(size, context.bigArrays(), config.getFieldName(), vsConfig); + } + TopMetricsAggregator aggregator = new TopMetricsAggregator( + name, context, - null, - config.getFieldName(), - config.getScript(), - config.getMissing(), - config.getTimeZone(), - null, - CoreValuesSourceType.NUMERIC + parent, + metadata, + size, + sortBuilders.get(0), + metricValues ); - MetricValuesSupplier supplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, vsConfig); - metricValues[i] = supplier.build(size, context.bigArrays(), config.getFieldName(), vsConfig); + success = true; + return aggregator; + } finally { + if (success == false) { + Releasables.close(metricValues); + } } - return new TopMetricsAggregator(name, context, parent, metadata, size, sortBuilders.get(0), metricValues); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java index 25755a745a387..8e85d16e01f9f 100644 --- 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestStatsBuilder.java
@@ -24,10 +24,18 @@ public class TTestStatsBuilder implements Releasable { TTestStatsBuilder(BigArrays bigArrays) { counts = bigArrays.newLongArray(1, true); - sums = bigArrays.newDoubleArray(1, true); - compensations = bigArrays.newDoubleArray(1, true); - sumOfSqrs = bigArrays.newDoubleArray(1, true); - sumOfSqrCompensations = bigArrays.newDoubleArray(1, true); + boolean success = false; + try { + sums = bigArrays.newDoubleArray(1, true); + compensations = bigArrays.newDoubleArray(1, true); + sumOfSqrs = bigArrays.newDoubleArray(1, true); + sumOfSqrCompensations = bigArrays.newDoubleArray(1, true); + success = true; + } finally { + if (success == false) { + close(); + } + } } public TTestStats get(long bucket) {
From 474bac80103243e989b04654a50a91c0d656c0b6 Mon Sep 17 00:00:00 2001 From: weizijun Date: Tue, 23 Nov 2021 16:34:15 +0800 Subject: [PATCH 2/7] add ignore info (#80924)
As https://github.com/elastic/elasticsearch/issues/80918 mentions, org.elasticsearch.index.TimeSeriesModeIT.testAddTimeStampMeta fails randomly. I ran the failing gradle command, but it passed locally. For now I am muting the test and will continue investigating what is wrong.
--- .../java/org/elasticsearch/index/TimeSeriesModeIT.java | 2 ++ 1 file changed, 2 insertions(+)
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/TimeSeriesModeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/TimeSeriesModeIT.java index 74f58fe2364ae..657772a4d3e58 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/TimeSeriesModeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/TimeSeriesModeIT.java
@@ -8,6 +8,7 @@ package org.elasticsearch.index; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.DocWriteResponse.Result; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.index.IndexResponse;
@@ -486,6 +487,7 @@ public void testEnabledTimeStampMapper() throws IOException { assertThat(getMappingsResponse.getMappings().get(index).source().string(), equalTo(Strings.toString(expect))); } + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/80918") public void testAddTimeStampMeta() throws IOException { Settings s = Settings.builder() .put(IndexSettings.MODE.getKey(), "time_series")
From 7db06c110bff7306899674e764248494f4616837 Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 23 Nov 2021 09:51:09 +0000 Subject: [PATCH 3/7] Fix shadowed vars pt6 (#80899)
Part of #19752. Fix more instances where local variable names were shadowing field names.
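As a minimal sketch of the pattern this series removes (the class below is hypothetical and not part of the patch; the real changes only rename parameters and locals, as in InstallPluginAction.setEnvironment in the diff that follows):

import org.elasticsearch.env.Environment;

public class ExampleAction {
    private Environment env; // field that was being shadowed

    // Before: the parameter named "env" shadows the field, so any unqualified
    // use of "env" in the method silently refers to the parameter.
    // void setEnvironment(Environment env) { this.env = env; }

    // After: the parameter gets a distinct name and the shadowing disappears.
    void setEnvironment(Environment environment) {
        this.env = environment;
    }
}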
--- .../plugins/cli/InstallPluginAction.java | 4 +- .../plugins/cli/SyncPluginsAction.java | 4 +- .../plugins/cli/InstallPluginActionTests.java | 49 ++++++++------- .../upgrades/FullClusterRestartIT.java | 29 +++++---- .../packaging/test/PackagingTestCase.java | 6 +- .../packaging/util/Distribution.java | 8 +-- .../packaging/util/docker/DockerRun.java | 14 ++--- .../RetentionLeasesReplicationTests.java | 8 +-- .../cluster/DiskUsageIntegTestCase.java | 18 +++--- .../MockInternalClusterInfoService.java | 20 +++--- .../AbstractCoordinatorTestCase.java | 14 ++--- .../index/engine/EngineTestCase.java | 1 + .../ESIndexLevelReplicationTestCase.java | 63 +++++++++---------- .../script/MockScriptEngine.java | 8 +-- .../elasticsearch/test/BackgroundIndexer.java | 4 +- .../test/ExternalTestCluster.java | 16 ++--- .../test/InternalTestCluster.java | 30 ++++----- .../org/elasticsearch/test/TestCluster.java | 4 +- .../elasticsearch/test/TestSearchContext.java | 38 +++++------ .../test/disruption/NetworkDisruption.java | 30 ++++----- .../test/disruption/SingleNodeDisruption.java | 20 +++--- .../test/rest/ESRestTestCase.java | 41 ++++++------ .../test/rest/FakeRestRequest.java | 16 ++--- .../test/rest/yaml/ObjectPath.java | 26 ++++---- .../yaml/restspec/ClientYamlSuiteRestApi.java | 16 ++--- .../test/rest/yaml/section/DoSection.java | 4 +- .../test/transport/FakeTransport.java | 8 +-- .../test/transport/MockTransport.java | 10 +-- .../analytics/boxplot/InternalBoxplot.java | 28 ++++----- .../multiterms/InternalMultiTerms.java | 21 ++++--- .../MultiTermsAggregationFactory.java | 10 +-- .../multiterms/MultiTermsAggregator.java | 6 +- .../normalize/NormalizePipelineMethods.java | 7 ++- .../rate/AbstractRateAggregator.java | 8 +-- .../xpack/analytics/rate/InternalRate.java | 8 +-- .../stringstats/InternalStringStats.java | 1 + .../TopMetricsAggregationBuilder.java | 2 +- .../xpack/search/AsyncSearchTask.java | 4 +- .../xpack/search/MutableSearchResponse.java | 11 ++-- .../xpack/async/AsyncResultsIndexPlugin.java | 2 +- .../xpack/autoscaling/Autoscaling.java | 21 +++---- .../autoscaling/AutoscalingMetadata.java | 6 +- .../action/PutAutoscalingPolicyAction.java | 6 +- ...ransportDeleteAutoscalingPolicyAction.java | 4 +- .../TransportPutAutoscalingPolicyAction.java | 4 +- .../memory/AutoscalingMemoryInfoService.java | 4 +- .../autoscaling/policy/AutoscalingPolicy.java | 6 +- .../ReactiveStorageDeciderService.java | 6 +- .../AutoscalingMemoryInfoServiceTests.java | 10 +-- .../ReactiveStorageDeciderDecisionTests.java | 15 +++-- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 7 ++- .../ccr/action/AutoFollowCoordinator.java | 26 ++++---- .../xpack/ccr/action/ShardFollowNodeTask.java | 9 +-- .../ccr/action/ShardFollowTasksExecutor.java | 4 +- .../ccr/action/TransportPutFollowAction.java | 4 +- .../xpack/ccr/repository/CcrRepository.java | 18 +++--- .../ShardFollowTaskReplicationTests.java | 12 ++-- .../datastreams/DataStreamsSnapshotsIT.java | 16 ++--- .../logging/DeprecationIndexingAppender.java | 6 +- 59 files changed, 403 insertions(+), 398 deletions(-) diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java index 0d3d31129d78e..75eedf4a6b84c 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/InstallPluginAction.java @@ 
-467,8 +467,8 @@ Path downloadZip(String urlString, Path tmpDir) throws IOException { } // for testing only - void setEnvironment(Environment env) { - this.env = env; + void setEnvironment(Environment environment) { + this.env = environment; } // for testing only diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java index edcc65ee60ede..ec12b3cedd3be 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/SyncPluginsAction.java @@ -113,7 +113,7 @@ public void execute() throws Exception { // @VisibleForTesting PluginChanges getPluginChanges(PluginsConfig pluginsConfig, Optional cachedPluginsConfig) throws PluginSyncException { - final List existingPlugins = getExistingPlugins(this.env); + final List existingPlugins = getExistingPlugins(); final List pluginsThatShouldExist = pluginsConfig.getPlugins(); final List pluginsThatActuallyExist = existingPlugins.stream() @@ -228,7 +228,7 @@ private List getPluginsToUpgrade( }).collect(Collectors.toList()); } - private List getExistingPlugins(Environment env) throws PluginSyncException { + private List getExistingPlugins() throws PluginSyncException { final List plugins = new ArrayList<>(); try { diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index f8c8ef7e680f6..c926a4b986744 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -291,15 +291,15 @@ void installPlugin(PluginDescriptor plugin, Path home, InstallPluginAction actio } void installPlugins(final List plugins, final Path home, final InstallPluginAction action) throws Exception { - final Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build()); - action.setEnvironment(env); + final Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", home).build()); + action.setEnvironment(environment); action.execute(plugins); } - void assertPlugin(String name, Path original, Environment env) throws IOException { - assertPluginInternal(name, env.pluginsFile(), original); - assertConfigAndBin(name, original, env); - assertInstallCleaned(env); + void assertPlugin(String name, Path original, Environment environment) throws IOException { + assertPluginInternal(name, environment.pluginsFile(), original); + assertConfigAndBin(name, original, environment); + assertInstallCleaned(environment); } void assertPluginInternal(String name, Path pluginsFile, Path originalPlugin) throws IOException { @@ -331,9 +331,9 @@ void assertPluginInternal(String name, Path pluginsFile, Path originalPlugin) th assertFalse("config was not copied", Files.exists(got.resolve("config"))); } - void assertConfigAndBin(String name, Path original, Environment env) throws IOException { + void assertConfigAndBin(String name, Path original, Environment environment) throws IOException { if (Files.exists(original.resolve("bin"))) { - Path binDir = env.binFile().resolve(name); + Path binDir = environment.binFile().resolve(name); 
assertTrue("bin dir exists", Files.exists(binDir)); assertTrue("bin is a dir", Files.isDirectory(binDir)); try (DirectoryStream stream = Files.newDirectoryStream(binDir)) { @@ -347,7 +347,7 @@ void assertConfigAndBin(String name, Path original, Environment env) throws IOEx } } if (Files.exists(original.resolve("config"))) { - Path configDir = env.configFile().resolve(name); + Path configDir = environment.configFile().resolve(name); assertTrue("config dir exists", Files.exists(configDir)); assertTrue("config is a dir", Files.isDirectory(configDir)); @@ -355,7 +355,7 @@ void assertConfigAndBin(String name, Path original, Environment env) throws IOEx GroupPrincipal group = null; if (isPosix) { - PosixFileAttributes configAttributes = Files.getFileAttributeView(env.configFile(), PosixFileAttributeView.class) + PosixFileAttributes configAttributes = Files.getFileAttributeView(environment.configFile(), PosixFileAttributeView.class) .readAttributes(); user = configAttributes.owner(); group = configAttributes.group(); @@ -383,8 +383,8 @@ void assertConfigAndBin(String name, Path original, Environment env) throws IOEx } } - void assertInstallCleaned(Environment env) throws IOException { - try (DirectoryStream stream = Files.newDirectoryStream(env.pluginsFile())) { + void assertInstallCleaned(Environment environment) throws IOException { + try (DirectoryStream stream = Files.newDirectoryStream(environment.pluginsFile())) { for (Path file : stream) { if (file.getFileName().toString().startsWith(".installing")) { fail("Installation dir still exists, " + file); @@ -598,22 +598,22 @@ public void testBinPermissions() throws Exception { public void testPluginPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); - final Path pluginDir = createPluginDir(temp); - final Path resourcesDir = pluginDir.resolve("resources"); - final Path platformDir = pluginDir.resolve("platform"); + final Path tempPluginDir = createPluginDir(temp); + final Path resourcesDir = tempPluginDir.resolve("resources"); + final Path platformDir = tempPluginDir.resolve("platform"); final Path platformNameDir = platformDir.resolve("linux-x86_64"); final Path platformBinDir = platformNameDir.resolve("bin"); Files.createDirectories(platformBinDir); - Files.createFile(pluginDir.resolve("fake-" + Version.CURRENT.toString() + ".jar")); + Files.createFile(tempPluginDir.resolve("fake-" + Version.CURRENT.toString() + ".jar")); Files.createFile(platformBinDir.resolve("fake_executable")); Files.createDirectory(resourcesDir); Files.createFile(resourcesDir.resolve("resource")); - final PluginDescriptor pluginZip = createPluginZip("fake", pluginDir); + final PluginDescriptor pluginZip = createPluginZip("fake", tempPluginDir); installPlugin(pluginZip); - assertPlugin("fake", pluginDir, env.v2()); + assertPlugin("fake", tempPluginDir, env.v2()); final Path fake = env.v2().pluginsFile().resolve("fake"); final Path resources = fake.resolve("resources"); @@ -729,9 +729,9 @@ public void testZipRelativeOutsideEntryName() throws Exception { } public void testOfficialPluginsHelpSortedAndMissingObviouslyWrongPlugins() throws Exception { - MockTerminal terminal = new MockTerminal(); - new MockInstallPluginCommand().main(new String[] { "--help" }, terminal); - try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) { + MockTerminal mockTerminal = new MockTerminal(); + new MockInstallPluginCommand().main(new String[] { "--help" }, mockTerminal); + try (BufferedReader reader = new BufferedReader(new 
StringReader(mockTerminal.getOutput()))) { String line = reader.readLine(); // first find the beginning of our list of official plugins @@ -1360,7 +1360,8 @@ private String signature(final byte[] bytes, final PGPSecretKey secretKey) { // checks the plugin requires a policy confirmation, and does not install when that is rejected by the user // the plugin is installed after this method completes - private void assertPolicyConfirmation(Tuple env, PluginDescriptor pluginZip, String... warnings) throws Exception { + private void assertPolicyConfirmation(Tuple pathEnvironmentTuple, PluginDescriptor pluginZip, String... warnings) + throws Exception { for (int i = 0; i < warnings.length; ++i) { String warning = warnings[i]; for (int j = 0; j < i; ++j) { @@ -1372,7 +1373,7 @@ private void assertPolicyConfirmation(Tuple env, PluginDescri assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream fileStream = Files.list(env.v2().pluginsFile())) { + try (Stream fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } @@ -1385,7 +1386,7 @@ private void assertPolicyConfirmation(Tuple env, PluginDescri e = expectThrows(UserException.class, () -> installPlugin(pluginZip)); assertThat(e.getMessage(), containsString("installation aborted by user")); assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning)); - try (Stream fileStream = Files.list(env.v2().pluginsFile())) { + try (Stream fileStream = Files.list(pathEnvironmentTuple.v2().pluginsFile())) { assertThat(fileStream.collect(Collectors.toList()), empty()); } } diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index ed41c04931faa..745276d3c4145 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -782,7 +782,7 @@ public void testSingleDoc() throws IOException { * Tests that a single empty shard index is correctly recovered. Empty shards are often an edge case. */ public void testEmptyShard() throws IOException { - final String index = "test_empty_shard"; + final String indexName = "test_empty_shard"; if (isRunningAgainstOldCluster()) { Settings.Builder settings = Settings.builder() @@ -794,9 +794,9 @@ public void testEmptyShard() throws IOException { // before timing out .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster - createIndex(index, settings.build()); + createIndex(indexName, settings.build()); } - ensureGreen(index); + ensureGreen(indexName); } /** @@ -1165,21 +1165,24 @@ public void testClosedIndices() throws Exception { * that the index has started shards. */ @SuppressWarnings("unchecked") - private void assertClosedIndex(final String index, final boolean checkRoutingTable) throws IOException { + private void assertClosedIndex(final String indexName, final boolean checkRoutingTable) throws IOException { final Map state = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state"))); - final Map metadata = (Map) XContentMapValues.extractValue("metadata.indices." + index, state); + final Map metadata = (Map) XContentMapValues.extractValue("metadata.indices." 
+ indexName, state); assertThat(metadata, notNullValue()); assertThat(metadata.get("state"), equalTo("close")); - final Map blocks = (Map) XContentMapValues.extractValue("blocks.indices." + index, state); + final Map blocks = (Map) XContentMapValues.extractValue("blocks.indices." + indexName, state); assertThat(blocks, notNullValue()); assertThat(blocks.containsKey(String.valueOf(MetadataIndexStateService.INDEX_CLOSED_BLOCK_ID)), is(true)); final Map settings = (Map) XContentMapValues.extractValue("settings", metadata); assertThat(settings, notNullValue()); - final Map routingTable = (Map) XContentMapValues.extractValue("routing_table.indices." + index, state); + final Map routingTable = (Map) XContentMapValues.extractValue( + "routing_table.indices." + indexName, + state + ); if (checkRoutingTable) { assertThat(routingTable, notNullValue()); assertThat(Booleans.parseBoolean((String) XContentMapValues.extractValue("index.verified_before_close", settings)), is(true)); @@ -1198,7 +1201,7 @@ private void assertClosedIndex(final String index, final boolean checkRoutingTab for (Map shard : shards) { assertThat(XContentMapValues.extractValue("shard", shard), equalTo(i)); assertThat(XContentMapValues.extractValue("state", shard), equalTo("STARTED")); - assertThat(XContentMapValues.extractValue("index", shard), equalTo(index)); + assertThat(XContentMapValues.extractValue("index", shard), equalTo(indexName)); } } } else { @@ -1353,12 +1356,12 @@ private String loadInfoDocument(String id) throws IOException { return m.group(1); } - private List dataNodes(String index, RestClient client) throws IOException { - Request request = new Request("GET", index + "/_stats"); + private List dataNodes(String indexName, RestClient client) throws IOException { + Request request = new Request("GET", indexName + "/_stats"); request.addParameter("level", "shards"); Response response = client.performRequest(request); List nodes = new ArrayList<>(); - List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0"); + List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + indexName + ".shards.0"); for (Object shard : shardStats) { final String nodeId = ObjectPath.evaluate(shard, "routing.node"); nodes.add(nodeId); @@ -1370,8 +1373,8 @@ private List dataNodes(String index, RestClient client) throws IOExcepti * Wait for an index to have green health, waiting longer than * {@link ESRestTestCase#ensureGreen}. 
*/ - protected void ensureGreenLongWait(String index) throws IOException { - Request request = new Request("GET", "/_cluster/health/" + index); + protected void ensureGreenLongWait(String indexName) throws IOException { + Request request = new Request("GET", "/_cluster/health/" + indexName); request.addParameter("timeout", "2m"); request.addParameter("wait_for_status", "green"); request.addParameter("wait_for_no_relocating_shards", "true"); diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index e2f2344e3f907..a38935caf90af 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -126,12 +126,12 @@ public abstract class PackagingTestCase extends Assert { // the java installation already installed on the system protected static final String systemJavaHome; static { - Shell sh = new Shell(); + Shell initShell = new Shell(); if (Platforms.WINDOWS) { - systemJavaHome = sh.run("$Env:SYSTEM_JAVA_HOME").stdout.trim(); + systemJavaHome = initShell.run("$Env:SYSTEM_JAVA_HOME").stdout.trim(); } else { assert Platforms.LINUX || Platforms.DARWIN; - systemJavaHome = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim(); + systemJavaHome = initShell.run("echo $SYSTEM_JAVA_HOME").stdout.trim(); } } diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/Distribution.java index 48c978cad62a0..f3fd1becab900 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -43,12 +43,8 @@ public Distribution(Path path) { this.platform = filename.contains("windows") ? Platform.WINDOWS : Platform.LINUX; this.hasJdk = filename.contains("no-jdk") == false; - String version = filename.split("-", 3)[1]; - this.baseVersion = version; - if (filename.contains("-SNAPSHOT")) { - version += "-SNAPSHOT"; - } - this.version = version; + this.baseVersion = filename.split("-", 3)[1]; + this.version = filename.contains("-SNAPSHOT") ? this.baseVersion + "-SNAPSHOT" : this.baseVersion; } public boolean isArchive() { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index fdf4201c96f12..30401d978ba2a 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -70,18 +70,18 @@ public DockerRun volume(Path from, Path to) { /** * Sets the UID that the container is run with, and the GID too if specified. 
* - * @param uid the UID to use, or {@code null} to use the image default - * @param gid the GID to use, or {@code null} to use the image default + * @param uidToUse the UID to use, or {@code null} to use the image default + * @param gidToUse the GID to use, or {@code null} to use the image default * @return the current builder */ - public DockerRun uid(Integer uid, Integer gid) { - if (uid == null) { - if (gid != null) { + public DockerRun uid(Integer uidToUse, Integer gidToUse) { + if (uidToUse == null) { + if (gidToUse != null) { throw new IllegalArgumentException("Cannot override GID without also overriding UID"); } } - this.uid = uid; - this.gid = gid; + this.uid = uidToUse; + this.gid = gidToUse; return this; } diff --git a/server/src/test/java/org/elasticsearch/index/replication/RetentionLeasesReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RetentionLeasesReplicationTests.java index 266b5b1b2608f..3d22ea609d811 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RetentionLeasesReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RetentionLeasesReplicationTests.java @@ -75,8 +75,8 @@ public void testOutOfOrderRetentionLeasesRequests() throws Exception { IndexMetadata indexMetadata = buildIndexMetadata(numberOfReplicas, settings, indexMapping); try (ReplicationGroup group = new ReplicationGroup(indexMetadata) { @Override - protected void syncRetentionLeases(ShardId shardId, RetentionLeases leases, ActionListener listener) { - listener.onResponse(new SyncRetentionLeasesResponse(new RetentionLeaseSyncAction.Request(shardId, leases))); + protected void syncRetentionLeases(ShardId id, RetentionLeases leases, ActionListener listener) { + listener.onResponse(new SyncRetentionLeasesResponse(new RetentionLeaseSyncAction.Request(id, leases))); } }) { group.startAll(); @@ -102,8 +102,8 @@ public void testSyncRetentionLeasesWithPrimaryPromotion() throws Exception { IndexMetadata indexMetadata = buildIndexMetadata(numberOfReplicas, settings, indexMapping); try (ReplicationGroup group = new ReplicationGroup(indexMetadata) { @Override - protected void syncRetentionLeases(ShardId shardId, RetentionLeases leases, ActionListener listener) { - listener.onResponse(new SyncRetentionLeasesResponse(new RetentionLeaseSyncAction.Request(shardId, leases))); + protected void syncRetentionLeases(ShardId id, RetentionLeases leases, ActionListener listener) { + listener.onResponse(new SyncRetentionLeasesResponse(new RetentionLeaseSyncAction.Request(id, leases))); } }) { group.startAll(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java index 99de3ca43dc28..29930f8bdd996 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java @@ -114,11 +114,11 @@ public String name() { @Override public long getTotalSpace() throws IOException { - final long totalSpace = this.totalSpace; - if (totalSpace == -1) { + final long totalSpaceCopy = this.totalSpace; + if (totalSpaceCopy == -1) { return super.getTotalSpace(); } else { - return totalSpace; + return totalSpaceCopy; } } @@ -129,21 +129,21 @@ public void setTotalSpace(long totalSpace) { @Override public long getUsableSpace() throws IOException { - final long totalSpace = this.totalSpace; - if (totalSpace == -1) { + final long 
totalSpaceCopy = this.totalSpace; + if (totalSpaceCopy == -1) { return super.getUsableSpace(); } else { - return Math.max(0L, totalSpace - getTotalFileSize(path)); + return Math.max(0L, totalSpaceCopy - getTotalFileSize(path)); } } @Override public long getUnallocatedSpace() throws IOException { - final long totalSpace = this.totalSpace; - if (totalSpace == -1) { + final long totalSpaceCopy = this.totalSpace; + if (totalSpaceCopy == -1) { return super.getUnallocatedSpace(); } else { - return Math.max(0L, totalSpace - getTotalFileSize(path)); + return Math.max(0L, totalSpaceCopy - getTotalFileSize(path)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index f346f012632e1..1d50a7ddfcfb3 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -40,13 +40,13 @@ public MockInternalClusterInfoService(Settings settings, ClusterService clusterS super(settings, clusterService, threadPool, client); } - public void setDiskUsageFunctionAndRefresh(BiFunction diskUsageFunction) { - this.diskUsageFunction = diskUsageFunction; + public void setDiskUsageFunctionAndRefresh(BiFunction diskUsageFn) { + this.diskUsageFunction = diskUsageFn; ClusterInfoServiceUtils.refresh(this); } - public void setShardSizeFunctionAndRefresh(Function shardSizeFunction) { - this.shardSizeFunction = shardSizeFunction; + public void setShardSizeFunctionAndRefresh(Function shardSizeFn) { + this.shardSizeFunction = shardSizeFn; ClusterInfoServiceUtils.refresh(this); } @@ -58,8 +58,8 @@ public ClusterInfo getClusterInfo() { @Override List adjustNodesStats(List nodesStats) { - final BiFunction diskUsageFunction = this.diskUsageFunction; - if (diskUsageFunction == null) { + final BiFunction diskUsageFunctionCopy = this.diskUsageFunction; + if (diskUsageFunctionCopy == null) { return nodesStats; } @@ -78,7 +78,7 @@ List adjustNodesStats(List nodesStats) { oldFsInfo.getTimestamp(), oldFsInfo.getIoStats(), StreamSupport.stream(oldFsInfo.spliterator(), false) - .map(fsInfoPath -> diskUsageFunction.apply(discoveryNode, fsInfoPath)) + .map(fsInfoPath -> diskUsageFunctionCopy.apply(discoveryNode, fsInfoPath)) .toArray(FsInfo.Path[]::new) ), nodeStats.getTransport(), @@ -108,12 +108,12 @@ class SizeFakingClusterInfo extends ClusterInfo { @Override public Long getShardSize(ShardRouting shardRouting) { - final Function shardSizeFunction = MockInternalClusterInfoService.this.shardSizeFunction; - if (shardSizeFunction == null) { + final Function shardSizeFunctionCopy = MockInternalClusterInfoService.this.shardSizeFunction; + if (shardSizeFunctionCopy == null) { return super.getShardSize(shardRouting); } - return shardSizeFunction.apply(shardRouting); + return shardSizeFunctionCopy.apply(shardRouting); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index cc41ca120ab87..150670bd29739 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -811,14 +811,14 @@ ClusterNode getAnyNode() { return getAnyNodeExcept(); } - 
ClusterNode getAnyNodeExcept(ClusterNode... clusterNodes) { - List filteredNodes = getAllNodesExcept(clusterNodes); + ClusterNode getAnyNodeExcept(ClusterNode... clusterNodesToExclude) { + List filteredNodes = getAllNodesExcept(clusterNodesToExclude); assert filteredNodes.isEmpty() == false; return randomFrom(filteredNodes); } - List getAllNodesExcept(ClusterNode... clusterNodes) { - Set forbiddenIds = Arrays.stream(clusterNodes).map(ClusterNode::getId).collect(Collectors.toSet()); + List getAllNodesExcept(ClusterNode... clusterNodesToExclude) { + Set forbiddenIds = Arrays.stream(clusterNodesToExclude).map(ClusterNode::getId).collect(Collectors.toSet()); return this.clusterNodes.stream().filter(n -> forbiddenIds.contains(n.getId()) == false).collect(Collectors.toList()); } @@ -1258,7 +1258,7 @@ ClusterNode restartedNode() { ClusterNode restartedNode( Function adaptGlobalMetadata, Function adaptCurrentTerm, - Settings nodeSettings + Settings settings ) { final Set allExceptVotingOnlyRole = DiscoveryNodeRole.roles() .stream() @@ -1273,7 +1273,7 @@ ClusterNode restartedNode( address.getAddress(), address, Collections.emptyMap(), - localNode.isMasterNode() && DiscoveryNode.isMasterNode(nodeSettings) ? allExceptVotingOnlyRole : emptySet(), + localNode.isMasterNode() && DiscoveryNode.isMasterNode(settings) ? allExceptVotingOnlyRole : emptySet(), Version.CURRENT ); try { @@ -1281,7 +1281,7 @@ ClusterNode restartedNode( nodeIndex, newLocalNode, node -> new MockPersistedState(newLocalNode, persistedState, adaptGlobalMetadata, adaptCurrentTerm), - nodeSettings, + settings, nodeHealthService ); } finally { diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 6cad713d58392..218ce75dd3ad8 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -141,6 +141,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; +@SuppressWarnings("HiddenField") public abstract class EngineTestCase extends ESTestCase { protected final ShardId shardId = new ShardId(new Index("index", "_na_"), 0); diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 41b2a2eed8244..2ac75c2c77b81 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -185,13 +185,13 @@ protected class ReplicationGroup implements AutoCloseable, Iterable ); private final RetentionLeaseSyncer retentionLeaseSyncer = new RetentionLeaseSyncer( - (shardId, primaryAllocationId, primaryTerm, retentionLeases, listener) -> syncRetentionLeases( - shardId, + (_shardId, primaryAllocationId, primaryTerm, retentionLeases, listener) -> syncRetentionLeases( + _shardId, retentionLeases, listener ), - (shardId, primaryAllocationId, primaryTerm, retentionLeases) -> syncRetentionLeases( - shardId, + (_shardId, primaryAllocationId, primaryTerm, retentionLeases) -> syncRetentionLeases( + _shardId, retentionLeases, ActionListener.wrap(r -> {}, e -> { throw new AssertionError("failed to background sync retention lease", e); }) ) @@ -208,13 
+208,13 @@ protected ReplicationGroup(final IndexMetadata indexMetadata) throws IOException } } - private ShardRouting createShardRouting(String nodeId, boolean primary) { + private ShardRouting createShardRouting(String nodeId, boolean isPrimary) { return TestShardRouting.newShardRouting( shardId, nodeId, - primary, + isPrimary, ShardRoutingState.INITIALIZING, - primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE + isPrimary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ); } @@ -337,10 +337,10 @@ assert shardRoutings().stream().anyMatch(shardRouting -> shardRouting.isSameAllo updateAllocationIDsOnPrimary(); } - protected synchronized void recoverPrimary(IndexShard primary) { - final DiscoveryNode pNode = getDiscoveryNode(primary.routingEntry().currentNodeId()); - primary.markAsRecovering("store", new RecoveryState(primary.routingEntry(), pNode, null)); - recoverFromStore(primary); + protected synchronized void recoverPrimary(IndexShard primaryShard) { + final DiscoveryNode pNode = getDiscoveryNode(primaryShard.routingEntry().currentNodeId()); + primaryShard.markAsRecovering("store", new RecoveryState(primaryShard.routingEntry(), pNode, null)); + recoverFromStore(primaryShard); } public synchronized IndexShard addReplicaWithExistingPath(final ShardPath shardPath, final String nodeId) throws IOException { @@ -401,7 +401,7 @@ public void onFailure(Exception e) { public synchronized void promoteReplicaToPrimary( IndexShard replica, - BiConsumer> primaryReplicaSyncer + BiConsumer> primaryReplicaSyncerArg ) throws IOException { final long newTerm = indexMetadata.primaryTerm(shardId.id()) + 1; IndexMetadata.Builder newMetadata = IndexMetadata.builder(indexMetadata).primaryTerm(shardId.id(), newTerm); @@ -416,7 +416,7 @@ public synchronized void promoteReplicaToPrimary( primary.updateShardState( primaryRouting, newTerm, - primaryReplicaSyncer, + primaryReplicaSyncerArg, currentClusterStateVersion.incrementAndGet(), activeIds(), routingTable @@ -584,12 +584,9 @@ private ReplicationTargets getReplicationTargets() { return replicationTargets; } - protected void syncRetentionLeases(ShardId shardId, RetentionLeases leases, ActionListener listener) { - new SyncRetentionLeases( - new RetentionLeaseSyncAction.Request(shardId, leases), - this, - listener.map(r -> new ReplicationResponse()) - ).execute(); + protected void syncRetentionLeases(ShardId id, RetentionLeases leases, ActionListener listener) { + new SyncRetentionLeases(new RetentionLeaseSyncAction.Request(id, leases), this, listener.map(r -> new ReplicationResponse())) + .execute(); } public synchronized RetentionLease addRetentionLease( @@ -717,8 +714,8 @@ public void failShard(String message, Exception exception) { } @Override - public void perform(Request request, ActionListener listener) { - performOnPrimary(getPrimaryShard(), request, listener); + public void perform(Request replicationRequest, ActionListener primaryResultListener) { + performOnPrimary(getPrimaryShard(), replicationRequest, primaryResultListener); } @Override @@ -767,20 +764,20 @@ class ReplicasRef implements ReplicationOperation.Replicas { @Override public void performOn( final ShardRouting replicaRouting, - final ReplicaRequest request, + final ReplicaRequest replicaRequest, final long primaryTerm, final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes, - final ActionListener listener + final ActionListener replicaResponseListener ) { IndexShard replica = 
replicationTargets.findReplicaShard(replicaRouting); replica.acquireReplicaOperationPermit( getPrimaryShard().getPendingPrimaryTerm(), globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, - listener.delegateFailure((delegatedListener, releasable) -> { + replicaResponseListener.delegateFailure((delegatedListener, releasable) -> { try { - performOnReplica(request, replica); + performOnReplica(replicaRequest, replica); releasable.close(); delegatedListener.onResponse( new ReplicaResponse(replica.getLocalCheckpoint(), replica.getLastKnownGlobalCheckpoint()) @@ -791,7 +788,7 @@ public void performOn( } }), ThreadPool.Names.WRITE, - request + replicaRequest ); } @@ -801,19 +798,19 @@ public void failShardIfNeeded( long primaryTerm, String message, Exception exception, - ActionListener listener + ActionListener actionListener ) { throw new UnsupportedOperationException("failing shard " + replica + " isn't supported. failure: " + message, exception); } @Override public void markShardCopyAsStaleIfNeeded( - ShardId shardId, + ShardId id, String allocationId, long primaryTerm, - ActionListener listener + ActionListener actionListener ) { - throw new UnsupportedOperationException("can't mark " + shardId + ", aid [" + allocationId + "] as stale"); + throw new UnsupportedOperationException("can't mark " + id + ", aid [" + allocationId + "] as stale"); } } @@ -837,8 +834,8 @@ public void setShardInfo(ReplicationResponse.ShardInfo shardInfo) { } @Override - public void runPostReplicationActions(ActionListener listener) { - listener.onResponse(null); + public void runPostReplicationActions(ActionListener actionListener) { + actionListener.onResponse(null); } } @@ -884,7 +881,7 @@ private void executeShardBulkOnPrimary( final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); primary.acquirePrimaryOperationPermit(permitAcquiredFuture, ThreadPool.Names.SAME, request); try (Releasable ignored = permitAcquiredFuture.actionGet()) { - MappingUpdatePerformer noopMappingUpdater = (update, shardId, listener1) -> {}; + MappingUpdatePerformer noopMappingUpdater = (_update, _shardId, _listener1) -> {}; TransportShardBulkAction.performOnPrimary( request, primary, diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index b3a83d4f9bcc8..8438cf68179bc 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -68,12 +68,12 @@ public MockScriptEngine( Map, ContextCompiler> contexts ) { - Map scripts = new HashMap<>(deterministicScripts.size() + nonDeterministicScripts.size()); - deterministicScripts.forEach((key, value) -> scripts.put(key, MockDeterministicScript.asDeterministic(value))); - nonDeterministicScripts.forEach((key, value) -> scripts.put(key, MockDeterministicScript.asNonDeterministic(value))); + Map scriptMap = new HashMap<>(deterministicScripts.size() + nonDeterministicScripts.size()); + deterministicScripts.forEach((key, value) -> scriptMap.put(key, MockDeterministicScript.asDeterministic(value))); + nonDeterministicScripts.forEach((key, value) -> scriptMap.put(key, MockDeterministicScript.asNonDeterministic(value))); this.type = type; - this.scripts = Collections.unmodifiableMap(scripts); + this.scripts = Collections.unmodifiableMap(scriptMap); this.contexts = Collections.unmodifiableMap(contexts); } diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index fa0785954d186..0b60d832a9228 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -248,8 +248,8 @@ private XContentBuilder generateSource(long id, Random random) throws IOExceptio private volatile TimeValue timeout = BulkShardRequest.DEFAULT_TIMEOUT; - public void setRequestTimeout(TimeValue timeout) { - this.timeout = timeout; + public void setRequestTimeout(TimeValue requestTimeout) { + this.timeout = requestTimeout; } private volatile boolean ignoreIndexingFailures; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index d2ec6bf4ee0d9..a729664116729 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -105,11 +105,11 @@ public ExternalTestCluster( pluginClasses = new ArrayList<>(pluginClasses); pluginClasses.add(MockHttpTransport.TestPlugin.class); Settings clientSettings = clientSettingsBuilder.build(); - MockNode node = new MockNode(clientSettings, pluginClasses); - Client client = clientWrapper.apply(node.client()); + MockNode mockNode = new MockNode(clientSettings, pluginClasses); + Client wrappedClient = clientWrapper.apply(mockNode.client()); try { - node.start(); - NodesInfoResponse nodeInfos = client.admin().cluster().prepareNodesInfo().clear().setSettings(true).setHttp(true).get(); + mockNode.start(); + NodesInfoResponse nodeInfos = wrappedClient.admin().cluster().prepareNodesInfo().clear().setSettings(true).setHttp(true).get(); httpAddresses = new InetSocketAddress[nodeInfos.getNodes().size()]; int dataNodes = 0; int masterAndDataNodes = 0; @@ -125,20 +125,20 @@ public ExternalTestCluster( } this.numDataNodes = dataNodes; this.numMasterAndDataNodes = masterAndDataNodes; - this.client = client; - this.node = node; + this.client = wrappedClient; + this.node = mockNode; logger.info("Setup ExternalTestCluster [{}] made of [{}] nodes", nodeInfos.getClusterName().value(), size()); } catch (NodeValidationException e) { try { - IOUtils.close(client, node); + IOUtils.close(wrappedClient, mockNode); } catch (IOException e1) { e.addSuppressed(e1); } throw new ElasticsearchException(e); } catch (Exception e) { try { - IOUtils.close(client, node); + IOUtils.close(wrappedClient, mockNode); } catch (IOException e1) { e.addSuppressed(e1); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index c444a31e990a4..5ad0c9489354e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -901,7 +901,7 @@ private final class NodeAndClient implements Closeable { this.name = name; this.originalNodeSettings = originalNodeSettings; this.nodeAndClientId = nodeAndClientId; - markNodeDataDirsAsNotEligibleForWipe(node); + markNodeDataDirsAsNotEligibleForWipe(); } Node node() { @@ -1018,7 +1018,7 @@ public void afterStart() { } }); closed.set(false); - markNodeDataDirsAsNotEligibleForWipe(node); + markNodeDataDirsAsNotEligibleForWipe(); } @Override @@ -1028,7 
+1028,7 @@ public void close() throws IOException { resetClient(); } finally { closed.set(true); - markNodeDataDirsAsPendingForWipe(node); + markNodeDataDirsAsPendingForWipe(); node.close(); try { if (node.awaitClose(10, TimeUnit.SECONDS) == false) { @@ -1040,17 +1040,17 @@ public void close() throws IOException { } } - private void markNodeDataDirsAsPendingForWipe(Node node) { + private void markNodeDataDirsAsPendingForWipe() { assert Thread.holdsLock(InternalTestCluster.this); - NodeEnvironment nodeEnv = node.getNodeEnvironment(); + NodeEnvironment nodeEnv = this.node.getNodeEnvironment(); if (nodeEnv.hasNodeFile()) { dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); } } - private void markNodeDataDirsAsNotEligibleForWipe(Node node) { + private void markNodeDataDirsAsNotEligibleForWipe() { assert Thread.holdsLock(InternalTestCluster.this); - NodeEnvironment nodeEnv = node.getNodeEnvironment(); + NodeEnvironment nodeEnv = this.node.getNodeEnvironment(); if (nodeEnv.hasNodeFile()) { dataDirToClean.removeAll(Arrays.asList(nodeEnv.nodeDataPaths())); } @@ -1984,14 +1984,14 @@ public synchronized Set nodesInclude(String index) { if (clusterService().state().routingTable().hasIndex(index)) { List allShards = clusterService().state().routingTable().allShards(index); DiscoveryNodes discoveryNodes = clusterService().state().getNodes(); - Set nodes = new HashSet<>(); + Set nodeNames = new HashSet<>(); for (ShardRouting shardRouting : allShards) { if (shardRouting.assignedToNode()) { DiscoveryNode discoveryNode = discoveryNodes.get(shardRouting.currentNodeId()); - nodes.add(discoveryNode.getName()); + nodeNames.add(discoveryNode.getName()); } } - return nodes; + return nodeNames; } return Collections.emptySet(); } @@ -2088,7 +2088,7 @@ public List startNodes(int numOfNodes, Settings settings) { */ public synchronized List startNodes(Settings... extraSettings) { final int newMasterCount = Math.toIntExact(Stream.of(extraSettings).filter(DiscoveryNode::isMasterNode).count()); - final List nodes = new ArrayList<>(); + final List nodeList = new ArrayList<>(); final int prevMasterCount = getMasterNodesCount(); int autoBootstrapMasterNodeIndex = autoManageMasterNodes && prevMasterCount == 0 @@ -2127,15 +2127,15 @@ public synchronized List startNodes(Settings... extraSettings) { firstNodeId + i, builder.put(nodeSettings).build(), false, - () -> rebuildUnicastHostFiles(nodes) + () -> rebuildUnicastHostFiles(nodeList) ); - nodes.add(nodeAndClient); + nodeList.add(nodeAndClient); } - startAndPublishNodesAndClients(nodes); + startAndPublishNodesAndClients(nodeList); if (autoManageMasterNodes) { validateClusterFormed(); } - return nodes.stream().map(NodeAndClient::getName).collect(Collectors.toList()); + return nodeList.stream().map(NodeAndClient::getName).collect(Collectors.toList()); } public List startMasterOnlyNodes(int numNodes) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index 3d551d3dfe064..ab6fd837c6b82 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -53,8 +53,8 @@ public long seed() { /** * This method should be executed before each test to reset the cluster to its initial state. 
*/ - public void beforeTest(Random random) throws IOException, InterruptedException { - this.random = new Random(random.nextLong()); + public void beforeTest(Random randomGenerator) throws IOException, InterruptedException { + this.random = new Random(randomGenerator.nextLong()); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 90c7b8a23b9ad..42f073a8a079b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -121,7 +121,7 @@ public void setSearcher(ContextIndexSearcher searcher) { public void preProcess() {} @Override - public Query buildFilteredQuery(Query query) { + public Query buildFilteredQuery(Query q) { return null; } @@ -166,8 +166,8 @@ public SearchContextAggregations aggregations() { } @Override - public SearchContext aggregations(SearchContextAggregations aggregations) { - this.aggregations = aggregations; + public SearchContext aggregations(SearchContextAggregations searchContextAggregations) { + this.aggregations = searchContextAggregations; return this; } @@ -302,8 +302,8 @@ public Float minimumScore() { } @Override - public SearchContext sort(SortAndFormats sort) { - this.sort = sort; + public SearchContext sort(SortAndFormats sortAndFormats) { + this.sort = sortAndFormats; return this; } @@ -313,8 +313,8 @@ public SortAndFormats sort() { } @Override - public SearchContext trackScores(boolean trackScores) { - this.trackScores = trackScores; + public SearchContext trackScores(boolean shouldTrackScores) { + this.trackScores = shouldTrackScores; return this; } @@ -324,8 +324,8 @@ public boolean trackScores() { } @Override - public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { - this.trackTotalHitsUpTo = trackTotalHitsUpTo; + public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpToValue) { + this.trackTotalHitsUpTo = trackTotalHitsUpToValue; return this; } @@ -335,8 +335,8 @@ public int trackTotalHitsUpTo() { } @Override - public SearchContext searchAfter(FieldDoc searchAfter) { - this.searchAfter = searchAfter; + public SearchContext searchAfter(FieldDoc searchAfterDoc) { + this.searchAfter = searchAfterDoc; return this; } @@ -356,8 +356,8 @@ public CollapseContext collapse() { } @Override - public SearchContext parsedPostFilter(ParsedQuery postFilter) { - this.postFilter = postFilter; + public SearchContext parsedPostFilter(ParsedQuery postFilterQuery) { + this.postFilter = postFilterQuery; return this; } @@ -367,9 +367,9 @@ public ParsedQuery parsedPostFilter() { } @Override - public SearchContext parsedQuery(ParsedQuery query) { - this.originalQuery = query; - this.query = query.query(); + public SearchContext parsedQuery(ParsedQuery parsedQuery) { + this.originalQuery = parsedQuery; + this.query = parsedQuery.query(); return this; } @@ -389,8 +389,8 @@ public int from() { } @Override - public SearchContext from(int from) { - this.from = from; + public SearchContext from(int fromValue) { + this.from = fromValue; return this; } @@ -404,7 +404,7 @@ public void setSize(int size) { } @Override - public SearchContext size(int size) { + public SearchContext size(int sizeValue) { return null; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java index 77811ce5f59fa..da04419d62bc0 100644 
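Most of the hunks in this part of the patch are mechanical renames of parameters and locals that shared a name with the field they assign, or, where a rename would hurt readability, an added @SuppressWarnings("HiddenField"). A minimal illustration of the shadowing problem these renames avoid, using invented names rather than code from this patch:

    class RequestOptions {
        private long timeoutMillis;

        // Before: a parameter also named "timeoutMillis" would shadow the field, and dropping the
        // "this." qualifier would silently turn the assignment into a no-op self-assignment.
        // After: a distinct parameter name keeps the assignment unambiguous.
        public void setRequestTimeout(long requestTimeoutMillis) {
            this.timeoutMillis = requestTimeoutMillis;
        }
    }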
--- a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java @@ -59,28 +59,28 @@ public NetworkLinkDisruptionType getNetworkLinkDisruptionType() { } @Override - public void applyToCluster(InternalTestCluster cluster) { - this.cluster = cluster; + public void applyToCluster(InternalTestCluster testCluster) { + this.cluster = testCluster; } @Override - public void removeFromCluster(InternalTestCluster cluster) { + public void removeFromCluster(InternalTestCluster testCluster) { stopDisrupting(); } @Override - public void removeAndEnsureHealthy(InternalTestCluster cluster) { - removeFromCluster(cluster); - ensureHealthy(cluster); + public void removeAndEnsureHealthy(InternalTestCluster testCluster) { + removeFromCluster(testCluster); + ensureHealthy(testCluster); } /** * ensures the cluster is healthy after the disruption */ - public void ensureHealthy(InternalTestCluster cluster) { + public void ensureHealthy(InternalTestCluster testCluster) { assert activeDisruption == false; - ensureNodeCount(cluster); - ensureFullyConnectedCluster(cluster); + ensureNodeCount(testCluster); + ensureFullyConnectedCluster(testCluster); } /** @@ -105,20 +105,20 @@ public static void ensureFullyConnectedCluster(InternalTestCluster cluster) { } } - protected void ensureNodeCount(InternalTestCluster cluster) { - cluster.validateClusterFormed(); + protected void ensureNodeCount(InternalTestCluster testCluster) { + testCluster.validateClusterFormed(); } @Override - public synchronized void applyToNode(String node, InternalTestCluster cluster) { + public synchronized void applyToNode(String node, InternalTestCluster testCluster) { } @Override - public synchronized void removeFromNode(String node1, InternalTestCluster cluster) { + public synchronized void removeFromNode(String node1, InternalTestCluster testCluster) { logger.info("stop disrupting node (disruption type: {}, disrupted links: {})", networkLinkDisruptionType, disruptedLinks); - applyToNodes(new String[] { node1 }, cluster.getNodeNames(), networkLinkDisruptionType::removeDisruption); - applyToNodes(cluster.getNodeNames(), new String[] { node1 }, networkLinkDisruptionType::removeDisruption); + applyToNodes(new String[] { node1 }, testCluster.getNodeNames(), networkLinkDisruptionType::removeDisruption); + applyToNodes(testCluster.getNodeNames(), new String[] { node1 }, networkLinkDisruptionType::removeDisruption); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java index 8aa73f7871435..a70afedb6f221 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java @@ -28,28 +28,28 @@ public SingleNodeDisruption(Random random) { } @Override - public void applyToCluster(InternalTestCluster cluster) { - this.cluster = cluster; + public void applyToCluster(InternalTestCluster testCluster) { + this.cluster = testCluster; if (disruptedNode == null) { - String[] nodes = cluster.getNodeNames(); + String[] nodes = testCluster.getNodeNames(); disruptedNode = nodes[random.nextInt(nodes.length)]; } } @Override - public void removeFromCluster(InternalTestCluster cluster) { + public void removeFromCluster(InternalTestCluster testCluster) { if (disruptedNode != null) { - 
removeFromNode(disruptedNode, cluster); + removeFromNode(disruptedNode, testCluster); } } @Override - public synchronized void applyToNode(String node, InternalTestCluster cluster) { + public synchronized void applyToNode(String node, InternalTestCluster testCluster) { } @Override - public synchronized void removeFromNode(String node, InternalTestCluster cluster) { + public synchronized void removeFromNode(String node, InternalTestCluster testCluster) { if (disruptedNode == null) { return; } @@ -65,14 +65,14 @@ public synchronized void testClusterClosed() { disruptedNode = null; } - protected void ensureNodeCount(InternalTestCluster cluster) { + protected void ensureNodeCount(InternalTestCluster testCluster) { assertFalse( "cluster failed to form after disruption was healed", - cluster.client() + testCluster.client() .admin() .cluster() .prepareHealth() - .setWaitForNodes(String.valueOf(cluster.size())) + .setWaitForNodes(String.valueOf(testCluster.size())) .setWaitForNoRelocatingShards(true) .get() .isTimedOut() diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index ed44973fb3656..b462137b133c0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -294,10 +294,10 @@ public void current(String... requiredWarnings) { /** * Adds to the set of warnings that are permissible (but not required) when running * in mixed-version clusters or those that differ in version from the test client. - * @param allowedWarnings optional warnings that will be ignored if received + * @param allowedWarningsToAdd optional warnings that will be ignored if received */ - public void compatible(String... allowedWarnings) { - this.allowedWarnings.addAll(Arrays.asList(allowedWarnings)); + public void compatible(String... allowedWarningsToAdd) { + this.allowedWarnings.addAll(Arrays.asList(allowedWarningsToAdd)); } @Override @@ -422,11 +422,11 @@ protected static RestClient adminClient() { * Wait for outstanding tasks to complete. The specified admin client is used to check the outstanding tasks and this is done using * {@link ESTestCase#assertBusy(CheckedRunnable)} to give a chance to any outstanding tasks to complete. * - * @param adminClient the admin client + * @param restClient the admin client * @throws Exception if an exception is thrown while checking the outstanding tasks */ - public static void waitForPendingTasks(final RestClient adminClient) throws Exception { - waitForPendingTasks(adminClient, taskName -> false); + public static void waitForPendingTasks(final RestClient restClient) throws Exception { + waitForPendingTasks(restClient, taskName -> false); } /** @@ -434,16 +434,16 @@ public static void waitForPendingTasks(final RestClient adminClient) throws Exce * {@link ESTestCase#assertBusy(CheckedRunnable)} to give a chance to any outstanding tasks to complete. The specified filter is used * to filter out outstanding tasks that are expected to be there. 
* - * @param adminClient the admin client + * @param restClient the admin client * @param taskFilter predicate used to filter tasks that are expected to be there * @throws Exception if an exception is thrown while checking the outstanding tasks */ - public static void waitForPendingTasks(final RestClient adminClient, final Predicate taskFilter) throws Exception { + public static void waitForPendingTasks(final RestClient restClient, final Predicate taskFilter) throws Exception { assertBusy(() -> { try { final Request request = new Request("GET", "/_cat/tasks"); request.addParameter("detailed", "true"); - final Response response = adminClient.performRequest(request); + final Response response = restClient.performRequest(request); /* * Check to see if there are outstanding tasks; we exclude the list task itself, and any expected outstanding tasks using * the specified task filter. @@ -1430,15 +1430,15 @@ public static void ensureHealth(String index, Consumer requestConsumer) ensureHealth(client(), index, requestConsumer); } - protected static void ensureHealth(RestClient client, String index, Consumer requestConsumer) throws IOException { + protected static void ensureHealth(RestClient restClient, String index, Consumer requestConsumer) throws IOException { Request request = new Request("GET", "/_cluster/health" + (index.isBlank() ? "" : "/" + index)); requestConsumer.accept(request); try { - client.performRequest(request); + restClient.performRequest(request); } catch (ResponseException e) { if (e.getResponse().getStatusLine().getStatusCode() == HttpStatus.SC_REQUEST_TIMEOUT) { try { - final Response clusterStateResponse = client.performRequest(new Request("GET", "/_cluster/state?pretty")); + final Response clusterStateResponse = restClient.performRequest(new Request("GET", "/_cluster/state?pretty")); fail( "timed out waiting for green state for index [" + index @@ -1496,9 +1496,9 @@ protected static void deleteIndex(String name) throws IOException { deleteIndex(client(), name); } - protected static void deleteIndex(RestClient client, String name) throws IOException { + protected static void deleteIndex(RestClient restClient, String name) throws IOException { Request request = new Request("DELETE", "/" + name); - client.performRequest(request); + restClient.performRequest(request); } protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException { @@ -1613,13 +1613,13 @@ protected static void registerRepository(String repository, String type, boolean registerRepository(client(), repository, type, verify, settings); } - protected static void registerRepository(RestClient client, String repository, String type, boolean verify, Settings settings) + protected static void registerRepository(RestClient restClient, String repository, String type, boolean verify, Settings settings) throws IOException { final Request request = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repository); request.addParameter("verify", Boolean.toString(verify)); request.setJsonEntity(Strings.toString(new PutRepositoryRequest(repository).type(type).settings(settings))); - final Response response = client.performRequest(request); + final Response response = restClient.performRequest(request); assertAcked("Failed to create repository [" + repository + "] of type [" + type + "]: " + response, response); } @@ -1627,12 +1627,12 @@ protected static void createSnapshot(String repository, String snapshot, boolean createSnapshot(client(), repository, snapshot, waitForCompletion); } - 
protected static void createSnapshot(RestClient client, String repository, String snapshot, boolean waitForCompletion) + protected static void createSnapshot(RestClient restClient, String repository, String snapshot, boolean waitForCompletion) throws IOException { final Request request = new Request(HttpPut.METHOD_NAME, "_snapshot/" + repository + '/' + snapshot); request.addParameter("wait_for_completion", Boolean.toString(waitForCompletion)); - final Response response = client.performRequest(request); + final Response response = restClient.performRequest(request); assertThat( "Failed to create snapshot [" + snapshot + "] in repository [" + repository + "]: " + response, response.getStatusLine().getStatusCode(), @@ -1656,12 +1656,13 @@ protected static void deleteSnapshot(String repository, String snapshot, boolean deleteSnapshot(client(), repository, snapshot, ignoreMissing); } - protected static void deleteSnapshot(RestClient client, String repository, String snapshot, boolean ignoreMissing) throws IOException { + protected static void deleteSnapshot(RestClient restClient, String repository, String snapshot, boolean ignoreMissing) + throws IOException { final Request request = new Request(HttpDelete.METHOD_NAME, "_snapshot/" + repository + '/' + snapshot); if (ignoreMissing) { request.addParameter("ignore", "404"); } - final Response response = client.performRequest(request); + final Response response = restClient.performRequest(request); assertThat(response.getStatusLine().getStatusCode(), ignoreMissing ? anyOf(equalTo(200), equalTo(404)) : equalTo(200)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 66bad2fe602fd..32902b49b515b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -111,17 +111,17 @@ public HttpRequest removeHeader(String header) { } @Override - public HttpResponse createResponse(RestStatus status, BytesReference content) { - Map headers = new HashMap<>(); + public HttpResponse createResponse(RestStatus status, BytesReference unused) { + Map responseHeaders = new HashMap<>(); return new HttpResponse() { @Override public void addHeader(String name, String value) { - headers.put(name, value); + responseHeaders.put(name, value); } @Override public boolean containsHeader(String name) { - return headers.containsKey(name); + return responseHeaders.containsKey(name); } }; } @@ -212,8 +212,8 @@ public Builder withParams(Map params) { return this; } - public Builder withContent(BytesReference content, XContentType xContentType) { - this.content = content; + public Builder withContent(BytesReference contentBytes, XContentType xContentType) { + this.content = contentBytes; if (xContentType != null) { headers.put("Content-Type", Collections.singletonList(xContentType.mediaType())); } @@ -230,8 +230,8 @@ public Builder withMethod(Method method) { return this; } - public Builder withRemoteAddress(InetSocketAddress address) { - this.address = address; + public Builder withRemoteAddress(InetSocketAddress remoteAddress) { + this.address = remoteAddress; return this; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java index 0bf67360e565d..06cff22d70394 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ObjectPath.java @@ -24,7 +24,7 @@ import java.util.Map; /** - * Holds an object and allows to extract specific values from it given their path + * Holds an object and allows extraction of specific values from it, given their path */ public class ObjectPath { @@ -77,24 +77,24 @@ public T evaluate(String path) throws IOException { @SuppressWarnings("unchecked") public T evaluate(String path, Stash stash) throws IOException { String[] parts = parsePath(path); - Object object = this.object; + Object result = this.object; for (String part : parts) { - object = evaluate(part, object, stash); - if (object == null) { + result = evaluate(part, result, stash); + if (result == null) { return null; } } - return (T) object; + return (T) result; } @SuppressWarnings("unchecked") - private Object evaluate(String key, Object object, Stash stash) throws IOException { + private Object evaluate(String key, Object objectToEvaluate, Stash stash) throws IOException { if (stash.containsStashedValue(key)) { key = stash.getValue(key).toString(); } - if (object instanceof Map) { - final Map objectAsMap = (Map) object; + if (objectToEvaluate instanceof Map) { + final Map objectAsMap = (Map) objectToEvaluate; if ("_arbitrary_key_".equals(key)) { if (objectAsMap.isEmpty()) { throw new IllegalArgumentException("requested [" + key + "] but the map was empty"); @@ -106,10 +106,10 @@ private Object evaluate(String key, Object object, Stash stash) throws IOExcepti } return objectAsMap.get(key); } - if (object instanceof List) { - List list = (List) object; + if (objectToEvaluate instanceof List) { + List list = (List) objectToEvaluate; try { - return list.get(Integer.valueOf(key)); + return list.get(Integer.parseInt(key)); } catch (NumberFormatException e) { throw new IllegalArgumentException("element was a list, but [" + key + "] was not numeric", e); } catch (IndexOutOfBoundsException e) { @@ -120,7 +120,9 @@ private Object evaluate(String key, Object object, Stash stash) throws IOExcepti } } - throw new IllegalArgumentException("no object found for [" + key + "] within object of class [" + object.getClass() + "]"); + throw new IllegalArgumentException( + "no object found for [" + key + "] within object of class [" + objectToEvaluate.getClass() + "]" + ); } private String[] parsePath(String path) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApi.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApi.java index 13c099af6e4ef..b6264e0a6d5e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApi.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApi.java @@ -166,11 +166,11 @@ public List getRequestMimeTypes() { * - /{index}/_alias/{name}, /{index}/_aliases/{name} * - /{index}/{type}/_mapping, /{index}/{type}/_mappings, /{index}/_mappings/{type}, /{index}/_mapping/{type} */ - public List getBestMatchingPaths(Set params) { + public List getBestMatchingPaths(Set pathParams) { PriorityQueue> queue = new PriorityQueue<>(Comparator.comparing(Tuple::v1, (a, b) -> Integer.compare(b, a))); for (ClientYamlSuiteRestApi.Path path : paths) { int matches = 0; - for (String actualParameter : params) { + for (String actualParameter : pathParams) { if 
(path.getParts().contains(actualParameter)) { matches++; } @@ -180,17 +180,17 @@ public List getBestMatchingPaths(Set params } } if (queue.isEmpty()) { - throw new IllegalStateException("Unable to find a matching path for api [" + name + "]" + params); + throw new IllegalStateException("Unable to find a matching path for api [" + name + "]" + pathParams); } - List paths = new ArrayList<>(); + List pathsByRelevance = new ArrayList<>(); Tuple poll = queue.poll(); int maxMatches = poll.v1(); do { - paths.add(poll.v2()); + pathsByRelevance.add(poll.v2()); poll = queue.poll(); } while (poll != null && poll.v1() == maxMatches); - return paths; + return pathsByRelevance; } public static class Path { @@ -224,8 +224,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - Path path = (Path) o; - return this.path.equals(path.path); + Path other = (Path) o; + return this.path.equals(other.path); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 35f29168d3623..5cabfc3bb4bc4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -252,8 +252,8 @@ public String getCatch() { return catchParam; } - public void setCatch(String catchParam) { - this.catchParam = catchParam; + public void setCatch(String param) { + this.catchParam = param; } public ApiCallSection getApiCallSection() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/FakeTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/FakeTransport.java index 4f750d9d563ee..d17b7aa021078 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/FakeTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/FakeTransport.java @@ -35,11 +35,11 @@ public class FakeTransport extends AbstractLifecycleComponent implements Transpo private TransportMessageListener listener; @Override - public void setMessageListener(TransportMessageListener listener) { + public void setMessageListener(TransportMessageListener messageListener) { if (this.listener != null) { throw new IllegalStateException("listener already set"); } - this.listener = listener; + this.listener = messageListener; } @Override @@ -63,8 +63,8 @@ public List getDefaultSeedAddresses() { } @Override - public void openConnection(DiscoveryNode node, ConnectionProfile profile, ActionListener listener) { - listener.onResponse(new CloseableConnection() { + public void openConnection(DiscoveryNode node, ConnectionProfile profile, ActionListener actionListener) { + actionListener.onResponse(new CloseableConnection() { @Override public DiscoveryNode getNode() { return node; diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index 8d6c078fd12c5..55d2e66bdcc0d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -74,7 +74,9 @@ public TransportService createTransportService( public MockTransport() { super(new FakeTransport()); - setDefaultConnectBehavior((transport, discoveryNode, profile, listener) -> 
listener.onResponse(createConnection(discoveryNode))); + setDefaultConnectBehavior( + (transport, discoveryNode, profile, actionListener) -> actionListener.onResponse(createConnection(discoveryNode)) + ); } /** @@ -172,12 +174,12 @@ public void sendRequest(long requestId, String action, TransportRequest request, protected void onSendRequest(long requestId, String action, TransportRequest request, DiscoveryNode node) {} @Override - public void setMessageListener(TransportMessageListener listener) { + public void setMessageListener(TransportMessageListener messageListener) { if (this.listener != null) { throw new IllegalStateException("listener already set"); } - this.listener = listener; - super.setMessageListener(listener); + this.listener = messageListener; + super.setMessageListener(messageListener); } protected NamedWriteableRegistry writeableRegistry() { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java index 477ea5c9b6ba6..b731e992b8e3b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/InternalBoxplot.java @@ -44,8 +44,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return state == null ? Double.NEGATIVE_INFINITY : state.getMin(); + double value(TDigestState digestState) { + return digestState == null ? Double.NEGATIVE_INFINITY : digestState.getMin(); } }, MAX { @@ -55,8 +55,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return state == null ? Double.POSITIVE_INFINITY : state.getMax(); + double value(TDigestState digestState) { + return digestState == null ? Double.POSITIVE_INFINITY : digestState.getMax(); } }, Q1 { @@ -66,8 +66,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return state == null ? Double.NaN : state.quantile(0.25); + double value(TDigestState digestState) { + return digestState == null ? Double.NaN : digestState.quantile(0.25); } }, Q2 { @@ -77,8 +77,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return state == null ? Double.NaN : state.quantile(0.5); + double value(TDigestState digestState) { + return digestState == null ? Double.NaN : digestState.quantile(0.5); } }, Q3 { @@ -88,8 +88,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return state == null ? Double.NaN : state.quantile(0.75); + double value(TDigestState digestState) { + return digestState == null ? 
Double.NaN : digestState.quantile(0.75); } }, LOWER { @@ -99,8 +99,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return whiskers(state)[0]; + double value(TDigestState digestState) { + return whiskers(digestState)[0]; } }, UPPER { @@ -110,8 +110,8 @@ enum Metrics { } @Override - double value(TDigestState state) { - return whiskers(state)[1]; + double value(TDigestState digestState) { + return whiskers(digestState)[1]; } }; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 9a55a991fb771..d43c15582e9c5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -350,6 +350,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { } @Override + @SuppressWarnings("HiddenField") protected InternalMultiTerms create( String name, List buckets, @@ -415,11 +416,13 @@ protected int getRequiredSize() { } @Override + @SuppressWarnings("HiddenField") protected Bucket createBucket(long docCount, InternalAggregations aggs, long docCountError, Bucket prototype) { return new Bucket(prototype.terms, docCount, aggs, prototype.showDocCountError, docCountError, formats, keyConverters); } @Override + @SuppressWarnings("HiddenField") public InternalMultiTerms create(List buckets) { return new InternalMultiTerms( name, @@ -493,9 +496,9 @@ private boolean[] needsPromotionToDouble(List aggregations) private InternalAggregation promoteToDouble(InternalAggregation aggregation, boolean[] needsPromotion) { InternalMultiTerms multiTerms = (InternalMultiTerms) aggregation; - List buckets = multiTerms.getBuckets(); + List multiTermsBuckets = multiTerms.getBuckets(); List> newKeys = new ArrayList<>(); - for (InternalMultiTerms.Bucket bucket : buckets) { + for (InternalMultiTerms.Bucket bucket : multiTermsBuckets) { newKeys.add(new ArrayList<>(bucket.terms.size())); } @@ -505,20 +508,20 @@ private InternalAggregation promoteToDouble(InternalAggregation aggregation, boo DocValueFormat format = formats.get(i); if (needsPromotion[i]) { newKeyConverters.add(KeyConverter.DOUBLE); - for (int j = 0; j < buckets.size(); j++) { - newKeys.get(j).add(converter.toDouble(format, buckets.get(j).terms.get(i))); + for (int j = 0; j < multiTermsBuckets.size(); j++) { + newKeys.get(j).add(converter.toDouble(format, multiTermsBuckets.get(j).terms.get(i))); } } else { newKeyConverters.add(converter); - for (int j = 0; j < buckets.size(); j++) { - newKeys.get(j).add(buckets.get(j).terms.get(i)); + for (int j = 0; j < multiTermsBuckets.size(); j++) { + newKeys.get(j).add(multiTermsBuckets.get(j).terms.get(i)); } } } - List newBuckets = new ArrayList<>(buckets.size()); - for (int i = 0; i < buckets.size(); i++) { - Bucket oldBucket = buckets.get(i); + List newBuckets = new ArrayList<>(multiTermsBuckets.size()); + for (int i = 0; i < multiTermsBuckets.size(); i++) { + Bucket oldBucket = multiTermsBuckets.get(i); newBuckets.add( new Bucket( newKeys.get(i), diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java index 2642f5c3c6111..fb9d1f1f5f35e 100644 --- 
a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationFactory.java @@ -57,15 +57,15 @@ public MultiTermsAggregationFactory( @Override protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { - TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(this.bucketCountThresholds); + TermsAggregator.BucketCountThresholds thresholds = new TermsAggregator.BucketCountThresholds(this.bucketCountThresholds); if (InternalOrder.isKeyOrder(order) == false - && bucketCountThresholds.getShardSize() == MultiTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { + && thresholds.getShardSize() == MultiTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { // The user has not made a shardSize selection. Use default // heuristic to avoid any wrong-ranking caused by distributed // counting - bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize())); + thresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(thresholds.getRequiredSize())); } - bucketCountThresholds.ensureValidity(); + thresholds.ensureValidity(); return new MultiTermsAggregator( name, factories, @@ -76,7 +76,7 @@ protected Aggregator createInternal(Aggregator parent, CardinalityUpperBound car showTermDocCountError, order, collectMode, - bucketCountThresholds, + thresholds, cardinality, metadata ); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 65f26c391612b..2e445389cb7c5 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -146,11 +146,11 @@ List termValuesList(LeafReaderContext ctx) throws IOException { List> docTerms(List termValuesList, int doc) throws IOException { List> terms = new ArrayList<>(); for (TermValues termValues : termValuesList) { - List values = termValues.collectValues(doc); - if (values == null) { + List collectValues = termValues.collectValues(doc); + if (collectValues == null) { return null; } - terms.add(values); + terms.add(collectValues); } return terms; } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java index 7b7f11846ee00..150dcfeeb8a50 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineMethods.java @@ -94,14 +94,14 @@ static class Softmax implements DoubleUnaryOperator { private double sumExp; Softmax(double[] values) { - double sumExp = 0.0; + double _sumExp = 0.0; for (Double value : values) { if (value.isNaN() == false) { - sumExp += Math.exp(value); + _sumExp += Math.exp(value); } } - this.sumExp = sumExp; + this.sumExp = _sumExp; } @Override @@ -117,6 +117,7 @@ abstract static class 
SinglePassSimpleStatisticsMethod implements DoubleUnaryOpe protected final double mean; protected final int count; + @SuppressWarnings("HiddenField") SinglePassSimpleStatisticsMethod(double[] values) { int count = 0; double sum = 0.0; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java index 5105ee73729ca..263969b59e932 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/AbstractRateAggregator.java @@ -61,20 +61,20 @@ public AbstractRateAggregator( } private SizedBucketAggregator findSizedBucketAncestor() { - SizedBucketAggregator sizedBucketAggregator = null; + SizedBucketAggregator aggregator = null; for (Aggregator ancestor = parent; ancestor != null; ancestor = ancestor.parent()) { if (ancestor instanceof SizedBucketAggregator) { - sizedBucketAggregator = (SizedBucketAggregator) ancestor; + aggregator = (SizedBucketAggregator) ancestor; break; } } - if (sizedBucketAggregator == null) { + if (aggregator == null) { throw new IllegalArgumentException( "The rate aggregation can only be used inside a date histogram aggregation or " + "composite aggregation with one date histogram value source" ); } - return sizedBucketAggregator; + return aggregator; } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java index c221fc612336e..4181e25a2864c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java @@ -72,15 +72,15 @@ public InternalRate reduce(List aggregations, ReduceContext // Compute the sum of double values with Kahan summation algorithm which is more // accurate than naive summation. 
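// Illustrative note, not part of the original patch: compensated (Kahan) summation carries the
// low-order bits that are lost when a small value is added to a large running total, roughly:
//   double y = value - compensation; double t = sum + y;
//   compensation = (t - sum) - y;    sum = t;
// CompensatedSum encapsulates this sum/compensation pair, which is why it is used below instead
// of a plain double accumulator.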
CompensatedSum kahanSummation = new CompensatedSum(0, 0); - Double divisor = null; + Double firstDivisor = null; for (InternalAggregation aggregation : aggregations) { double value = ((InternalRate) aggregation).sum; kahanSummation.add(value); - if (divisor == null) { - divisor = ((InternalRate) aggregation).divisor; + if (firstDivisor == null) { + firstDivisor = ((InternalRate) aggregation).divisor; } } - return new InternalRate(name, kahanSummation.value(), divisor, format, getMetadata()); + return new InternalRate(name, kahanSummation.value(), firstDivisor, format, getMetadata()); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java index b39cc0fda4e9c..e21a99fecaec4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/InternalStringStats.java @@ -199,6 +199,7 @@ public Object value(String name) { } @Override + @SuppressWarnings("HiddenField") public InternalStringStats reduce(List aggregations, ReduceContext reduceContext) { long count = 0; long totalLength = 0; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java index bdf28c33786fe..3d634ecfb2d21 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregationBuilder.java @@ -142,7 +142,7 @@ public TopMetricsAggregationBuilder( */ public TopMetricsAggregationBuilder(StreamInput in) throws IOException { super(in); - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "HiddenField" }) List> sortBuilders = (List>) (List) in.readNamedWriteableList(SortBuilder.class); this.sortBuilders = sortBuilders; this.size = in.readVInt(); diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index 924581f57c829..d7027859a9107 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -132,8 +132,8 @@ Listener getSearchProgressActionListener() { * Update the expiration time of the (partial) response. 
*/ @Override - public void setExpirationTime(long expirationTimeMillis) { - this.expirationTimeMillis = expirationTimeMillis; + public void setExpirationTime(long expirationTime) { + this.expirationTimeMillis = expirationTime; } @Override diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java index 482679085650b..fc67c1bc7886d 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java @@ -85,6 +85,7 @@ class MutableSearchResponse { * Updates the response with the result of a partial reduction. * @param reducedAggs is a strategy for producing the reduced aggs */ + @SuppressWarnings("HiddenField") synchronized void updatePartialResponse( int successfulShards, TotalHits totalHits, @@ -138,11 +139,11 @@ synchronized void updateWithFailure(ElasticsearchException exc) { /** * Adds a shard failure concurrently (non-blocking). */ - void addQueryFailure(int shardIndex, ShardSearchFailure failure) { + void addQueryFailure(int shardIndex, ShardSearchFailure shardSearchFailure) { synchronized (this) { failIfFrozen(); } - queryFailures.set(shardIndex, failure); + queryFailures.set(shardIndex, shardSearchFailure); } private SearchResponse buildResponse(long taskStartTimeNanos, InternalAggregations reducedAggs) { @@ -290,9 +291,9 @@ private ShardSearchFailure[] buildQueryFailures() { } List failures = new ArrayList<>(); for (int i = 0; i < queryFailures.length(); i++) { - ShardSearchFailure failure = queryFailures.get(i); - if (failure != null) { - failures.add(failure); + ShardSearchFailure shardSearchFailure = queryFailures.get(i); + if (shardSearchFailure != null) { + failures.add(shardSearchFailure); } } return failures.toArray(ShardSearchFailure[]::new); diff --git a/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java b/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java index 1031228a31f37..416ef4809344a 100644 --- a/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java +++ b/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java @@ -43,7 +43,7 @@ public AsyncResultsIndexPlugin(Settings settings) { } @Override - public Collection getSystemIndexDescriptors(Settings settings) { + public Collection getSystemIndexDescriptors(Settings unused) { return List.of(AsyncTaskIndexService.getSystemIndexDescriptor()); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java index 3cdb18c7ae24d..7245531b87ac3 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java @@ -88,7 +88,7 @@ public class Autoscaling extends Plugin implements ActionPlugin, ExtensiblePlugi ); private final List autoscalingExtensions; - private final SetOnce clusterService = new SetOnce<>(); + private final SetOnce clusterServiceHolder = new SetOnce<>(); private final SetOnce allocationDeciders = new SetOnce<>(); private final AutoscalingLicenseChecker autoscalingLicenseChecker; @@ 
-115,7 +115,7 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - this.clusterService.set(clusterService); + this.clusterServiceHolder.set(clusterService); return List.of( new AutoscalingCalculateCapacityService.Holder(this), autoscalingLicenseChecker, @@ -209,26 +209,19 @@ public void loadExtensions(ExtensionLoader loader) { @Override public Collection deciders() { assert allocationDeciders.get() != null; + final ClusterService clusterService = clusterServiceHolder.get(); return List.of( new FixedAutoscalingDeciderService(), - new ReactiveStorageDeciderService( - clusterService.get().getSettings(), - clusterService.get().getClusterSettings(), - allocationDeciders.get() - ), - new ProactiveStorageDeciderService( - clusterService.get().getSettings(), - clusterService.get().getClusterSettings(), - allocationDeciders.get() - ), + new ReactiveStorageDeciderService(clusterService.getSettings(), clusterService.getClusterSettings(), allocationDeciders.get()), + new ProactiveStorageDeciderService(clusterService.getSettings(), clusterService.getClusterSettings(), allocationDeciders.get()), new FrozenShardsDeciderService(), new FrozenStorageDeciderService(), new FrozenExistenceDeciderService() ); } - public Set createDeciderServices(AllocationDeciders allocationDeciders) { - this.allocationDeciders.set(allocationDeciders); + public Set createDeciderServices(AllocationDeciders deciders) { + this.allocationDeciders.set(deciders); return autoscalingExtensions.stream().flatMap(p -> p.deciders().stream()).collect(Collectors.toSet()); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java index 2489063244e2b..a5a80d15661ab 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java @@ -74,12 +74,12 @@ public AutoscalingMetadata(final SortedMap po public AutoscalingMetadata(final StreamInput in) throws IOException { final int size = in.readVInt(); - final SortedMap policies = new TreeMap<>(); + final SortedMap policiesMap = new TreeMap<>(); for (int i = 0; i < size; i++) { final AutoscalingPolicyMetadata policyMetadata = new AutoscalingPolicyMetadata(in); - policies.put(policyMetadata.policy().name(), policyMetadata); + policiesMap.put(policyMetadata.policy().name(), policyMetadata); } - this.policies = policies; + this.policies = policiesMap; } @Override diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java index 0a1f92960fa73..64b29e1d83eaa 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/PutAutoscalingPolicyAction.java @@ -93,11 +93,11 @@ public Request(final StreamInput in) throws IOException { } if (in.readBoolean()) { int deciderCount = in.readInt(); - SortedMap deciders = new TreeMap<>(); + SortedMap decidersMap = new TreeMap<>(); for (int i = 0; i < deciderCount; ++i) { - deciders.put(in.readString(), Settings.readSettingsFromStream(in)); + 
decidersMap.put(in.readString(), Settings.readSettingsFromStream(in)); } - this.deciders = Collections.unmodifiableSortedMap(deciders); + this.deciders = Collections.unmodifiableSortedMap(decidersMap); } else { this.deciders = null; } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportDeleteAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportDeleteAutoscalingPolicyAction.java index 6c0f0b6e0d500..a6fcc13294cb2 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportDeleteAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportDeleteAutoscalingPolicyAction.java @@ -34,7 +34,7 @@ public class TransportDeleteAutoscalingPolicyAction extends AcknowledgedTransportMasterNodeAction { - private static final Logger logger = LogManager.getLogger(TransportPutAutoscalingPolicyAction.class); + private static final Logger LOGGER = LogManager.getLogger(TransportPutAutoscalingPolicyAction.class); @Inject public TransportDeleteAutoscalingPolicyAction( @@ -68,7 +68,7 @@ protected void masterOperation( clusterService.submitStateUpdateTask("delete-autoscaling-policy", new AckedClusterStateUpdateTask(request, listener) { @Override public ClusterState execute(final ClusterState currentState) { - return deleteAutoscalingPolicy(currentState, request.name(), logger); + return deleteAutoscalingPolicy(currentState, request.name(), LOGGER); } }); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportPutAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportPutAutoscalingPolicyAction.java index 71c21f99ed847..915fa51f46f7b 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportPutAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportPutAutoscalingPolicyAction.java @@ -39,7 +39,7 @@ public class TransportPutAutoscalingPolicyAction extends AcknowledgedTransportMasterNodeAction { - private static final Logger logger = LogManager.getLogger(TransportPutAutoscalingPolicyAction.class); + private static final Logger LOGGER = LogManager.getLogger(TransportPutAutoscalingPolicyAction.class); private final PolicyValidator policyValidator; private final AutoscalingLicenseChecker autoscalingLicenseChecker; @@ -104,7 +104,7 @@ protected void masterOperation( clusterService.submitStateUpdateTask("put-autoscaling-policy", new AckedClusterStateUpdateTask(request, listener) { @Override public ClusterState execute(final ClusterState currentState) { - return putAutoscalingPolicy(currentState, request, policyValidator, logger); + return putAutoscalingPolicy(currentState, request, policyValidator, LOGGER); } }); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoService.java index 13e52b96180c5..e3f18f2c1c56e 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoService.java +++ 
b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoService.java @@ -185,9 +185,9 @@ private void addNodeStats(ImmutableOpenMap.Builder builder, NodeSt } public AutoscalingMemoryInfo snapshot() { - final ImmutableOpenMap nodeToMemory = this.nodeToMemory; + final ImmutableOpenMap nodeToMemoryRef = this.nodeToMemory; return node -> { - Long result = nodeToMemory.get(node.getEphemeralId()); + Long result = nodeToMemoryRef.get(node.getEphemeralId()); // noinspection NumberEquality if (result == FETCHING_SENTINEL) { return null; diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/policy/AutoscalingPolicy.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/policy/AutoscalingPolicy.java index f592ff21a9efe..64b9974f483c4 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/policy/AutoscalingPolicy.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/policy/AutoscalingPolicy.java @@ -90,11 +90,11 @@ public AutoscalingPolicy(final StreamInput in) throws IOException { this.name = in.readString(); this.roles = in.readSet(StreamInput::readString).stream().collect(Sets.toUnmodifiableSortedSet()); int deciderCount = in.readInt(); - SortedMap deciders = new TreeMap<>(); + SortedMap decidersMap = new TreeMap<>(); for (int i = 0; i < deciderCount; ++i) { - deciders.put(in.readString(), Settings.readSettingsFromStream(in)); + decidersMap.put(in.readString(), Settings.readSettingsFromStream(in)); } - this.deciders = Collections.unmodifiableSortedMap(deciders); + this.deciders = Collections.unmodifiableSortedMap(decidersMap); } @Override diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 4a43819356d3d..03a694511787e 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -399,7 +399,7 @@ private IndexMetadata indexMetadata(ShardRouting shard, RoutingAllocation alloca return allocation.metadata().getIndexSafe(shard.index()); } - private Optional highestPreferenceTier(List preferredTiers, DiscoveryNodes nodes) { + private Optional highestPreferenceTier(List preferredTiers, DiscoveryNodes unused) { assert preferredTiers.isEmpty() == false; return Optional.of(preferredTiers.get(0)); } @@ -429,8 +429,8 @@ private long getExpectedShardSize(ShardRouting shard) { } long unmovableSize(String nodeId, Collection shards) { - ClusterInfo info = this.info; - DiskUsage diskUsage = info.getNodeMostAvailableDiskUsages().get(nodeId); + ClusterInfo clusterInfo = this.info; + DiskUsage diskUsage = clusterInfo.getNodeMostAvailableDiskUsages().get(nodeId); if (diskUsage == null) { // do not want to scale up then, since this should only happen when node has just joined (clearly edge case). 
return 0; diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoServiceTests.java index 2e8089c2f1986..67cb99ca3904b 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/capacity/memory/AutoscalingMemoryInfoServiceTests.java @@ -395,9 +395,9 @@ public void respond(NodesStatsResponse response, Runnable whileFetching) { }); } - public void respond(BiConsumer> responder) { - assertThat(responder, notNullValue()); - this.responder = responder; + public void respond(BiConsumer> responderValue) { + assertThat(responderValue, notNullValue()); + this.responder = responderValue; } @Override @@ -410,11 +410,11 @@ protected void NodesStatsRequest nodesStatsRequest = (NodesStatsRequest) request; assertThat(nodesStatsRequest.timeout(), equalTo(fetchTimeout)); assertThat(responder, notNullValue()); - BiConsumer> responder = this.responder; + BiConsumer> responderValue = this.responder; this.responder = null; @SuppressWarnings("unchecked") ActionListener statsListener = (ActionListener) listener; - responder.accept(nodesStatsRequest, statsListener); + responderValue.accept(nodesStatsRequest, statsListener); } public void assertNoResponder() { diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java index da619f5392d82..5e4fac860182d 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderDecisionTests.java @@ -128,13 +128,13 @@ public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAl @Before public void setup() { - ClusterState state = ClusterState.builder(new ClusterName("test")).build(); - state = addRandomIndices(hotNodes, hotNodes, state); - state = addDataNodes(DATA_HOT_NODE_ROLE, "hot", state, hotNodes); - state = addDataNodes(DATA_WARM_NODE_ROLE, "warm", state, warmNodes); - this.state = state; + ClusterState clusterState = ClusterState.builder(new ClusterName("test")).build(); + clusterState = addRandomIndices(hotNodes, hotNodes, clusterState); + clusterState = addDataNodes(DATA_HOT_NODE_ROLE, "hot", clusterState, hotNodes); + clusterState = addDataNodes(DATA_WARM_NODE_ROLE, "warm", clusterState, warmNodes); + this.state = clusterState; - Set shardIds = shardIds(state.getRoutingNodes().unassigned()); + Set shardIds = shardIds(clusterState.getRoutingNodes().unassigned()); this.subjectShards = new HashSet<>(randomSubsetOf(randomIntBetween(1, shardIds.size()), shardIds)); } @@ -353,8 +353,7 @@ private void verify(VerificationSubject subject, long expected, AllocationDecide } private void verify(VerificationSubject subject, long expected, DiscoveryNodeRole role, AllocationDecider... 
allocationDeciders) { - ClusterState state = this.state; - verify(state, subject, expected, role, allocationDeciders); + verify(this.state, subject, expected, role, allocationDeciders); } private static void verify( diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index a234153535f97..d3d85d3f1f28e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -171,6 +171,7 @@ public Ccr(final Settings settings) { } @Override + @SuppressWarnings("HiddenField") public Collection createComponents( final Client client, final ClusterService clusterService, @@ -211,6 +212,7 @@ public Collection createComponents( } @Override + @SuppressWarnings("HiddenField") public List> getPersistentTasksExecutor( ClusterService clusterService, ThreadPool threadPool, @@ -271,7 +273,7 @@ public List> getPersistentTasksExecutor( } public List getRestHandlers( - Settings settings, + Settings unused, RestController restController, ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings, @@ -367,6 +369,7 @@ public Optional getEngineFactory(final IndexSettings indexSetting } } + @SuppressWarnings("HiddenField") public List> getExecutorBuilders(Settings settings) { if (enabled == false) { return Collections.emptyList(); @@ -417,7 +420,7 @@ public Collection> ind } @Override - public Collection createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { + public Collection createAllocationDeciders(Settings unused, ClusterSettings clusterSettings) { return List.of(new CcrPrimaryFollowerAllocationDecider()); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index d2c825da6f3d8..f514694e83396 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -146,9 +146,9 @@ protected void doClose() { } public synchronized AutoFollowStats getStats() { - final Map autoFollowers = this.autoFollowers; + final Map autoFollowersCopy = this.autoFollowers; final TreeMap timesSinceLastAutoFollowPerRemoteCluster = new TreeMap<>(); - for (Map.Entry entry : autoFollowers.entrySet()) { + for (Map.Entry entry : autoFollowersCopy.entrySet()) { long lastAutoFollowTimeInMillis = entry.getValue().lastAutoFollowTimeInMillis; long lastSeenMetadataVersion = entry.getValue().metadataVersion; if (lastAutoFollowTimeInMillis != -1) { @@ -227,13 +227,13 @@ void updateAutoFollowers(ClusterState followerClusterState) { return; } - final CopyOnWriteHashMap autoFollowers = CopyOnWriteHashMap.copyOf(this.autoFollowers); + final CopyOnWriteHashMap autoFollowersCopy = CopyOnWriteHashMap.copyOf(this.autoFollowers); Set newRemoteClusters = autoFollowMetadata.getPatterns() .values() .stream() .filter(AutoFollowPattern::isActive) .map(AutoFollowPattern::getRemoteCluster) - .filter(remoteCluster -> autoFollowers.containsKey(remoteCluster) == false) + .filter(remoteCluster -> autoFollowersCopy.containsKey(remoteCluster) == false) .collect(Collectors.toSet()); Map newAutoFollowers = new HashMap<>(newRemoteClusters.size()); @@ -313,7 +313,7 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS } List 
removedRemoteClusters = new ArrayList<>(); - for (Map.Entry entry : autoFollowers.entrySet()) { + for (Map.Entry entry : autoFollowersCopy.entrySet()) { String remoteCluster = entry.getKey(); AutoFollower autoFollower = entry.getValue(); boolean exist = autoFollowMetadata.getPatterns() @@ -334,7 +334,7 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS } } assert assertNoOtherActiveAutoFollower(newAutoFollowers); - this.autoFollowers = autoFollowers.copyAndPutAll(newAutoFollowers).copyAndRemoveAll(removedRemoteClusters); + this.autoFollowers = autoFollowersCopy.copyAndPutAll(newAutoFollowers).copyAndRemoveAll(removedRemoteClusters); } private boolean assertNoOtherActiveAutoFollower(Map newAutoFollowers) { @@ -527,7 +527,7 @@ private void autoFollowIndices( private void checkAutoFollowPattern( String autoFollowPattenName, - String remoteCluster, + String remoteClusterString, AutoFollowPattern autoFollowPattern, List leaderIndicesToFollow, Map headers, @@ -603,7 +603,7 @@ private void checkAutoFollowPattern( } else { followLeaderIndex( autoFollowPattenName, - remoteCluster, + remoteClusterString, indexToFollow, autoFollowPattern, headers, @@ -633,7 +633,7 @@ private static boolean leaderIndexAlreadyFollowed(AutoFollowPattern autoFollowPa private void followLeaderIndex( String autoFollowPattenName, - String remoteCluster, + String remoteClusterString, Index indexToFollow, AutoFollowPattern pattern, Map headers, @@ -643,7 +643,7 @@ private void followLeaderIndex( final String followIndexName = getFollowerIndexName(pattern, leaderIndexName); PutFollowAction.Request request = new PutFollowAction.Request(); - request.setRemoteCluster(remoteCluster); + request.setRemoteCluster(remoteClusterString); request.setLeaderIndex(indexToFollow.getName()); request.setFollowerIndex(followIndexName); request.setSettings(pattern.getSettings()); @@ -852,13 +852,13 @@ static class AutoFollowResult { AutoFollowResult(String autoFollowPatternName, List> results) { this.autoFollowPatternName = autoFollowPatternName; - Map autoFollowExecutionResults = new HashMap<>(); + Map mutableAutoFollowExecutionResults = new HashMap<>(); for (Tuple result : results) { - autoFollowExecutionResults.put(result.v1(), result.v2()); + mutableAutoFollowExecutionResults.put(result.v1(), result.v2()); } this.clusterStateFetchException = null; - this.autoFollowExecutionResults = Collections.unmodifiableMap(autoFollowExecutionResults); + this.autoFollowExecutionResults = Collections.unmodifiableMap(mutableAutoFollowExecutionResults); } AutoFollowResult(String autoFollowPatternName, Exception e) { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 541af27e6f6af..2e502e30f53f3 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -136,6 +136,7 @@ protected boolean removeEldestEntry(final Map.Entry operations, - long leaderMaxSeqNoOfUpdatesOrDeletes, + long leaderMaxSequenceNoOfUpdatesOrDeletes, AtomicInteger retryCounter ) { - assert leaderMaxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "mus is not replicated"; + assert leaderMaxSequenceNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "mus is not replicated"; final long startTime = relativeTimeProvider.getAsLong(); - 
innerSendBulkShardOperationsRequest(followerHistoryUUID, operations, leaderMaxSeqNoOfUpdatesOrDeletes, response -> { + innerSendBulkShardOperationsRequest(followerHistoryUUID, operations, leaderMaxSequenceNoOfUpdatesOrDeletes, response -> { synchronized (ShardFollowNodeTask.this) { totalWriteTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); successfulWriteRequests++; @@ -459,7 +460,7 @@ private void sendBulkShardOperationsRequest( handleFailure( e, retryCounter, - () -> sendBulkShardOperationsRequest(operations, leaderMaxSeqNoOfUpdatesOrDeletes, retryCounter) + () -> sendBulkShardOperationsRequest(operations, leaderMaxSequenceNoOfUpdatesOrDeletes, retryCounter) ); }); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 07504e9ce41c0..b5736916240b7 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -606,12 +606,12 @@ protected void nodeOperation(final AllocatedPersistentTask task, final ShardFoll } private void fetchFollowerShardInfo( - final Client client, + final Client followerClient, final ShardId shardId, final FollowerStatsInfoHandler handler, final Consumer errorHandler ) { - client.admin().indices().stats(new IndicesStatsRequest().indices(shardId.getIndexName()), ActionListener.wrap(r -> { + followerClient.admin().indices().stats(new IndicesStatsRequest().indices(shardId.getIndexName()), ActionListener.wrap(r -> { IndexStats indexStats = r.getIndex(shardId.getIndexName()); if (indexStats == null) { IndexMetadata indexMetadata = clusterService.state().metadata().index(shardId.getIndex()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 16931cd330a3c..ae5dbdeef564f 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -274,7 +274,7 @@ public void onFailure(Exception e) { } private void initiateFollowing( - final Client client, + final Client clientWithHeaders, final PutFollowAction.Request request, final ActionListener listener ) { @@ -283,7 +283,7 @@ private void initiateFollowing( ResumeFollowAction.Request resumeFollowRequest = new ResumeFollowAction.Request(); resumeFollowRequest.setFollowerIndex(request.getFollowerIndex()); resumeFollowRequest.setParameters(new FollowParameters(parameters)); - client.execute( + clientWithHeaders.execute( ResumeFollowAction.INSTANCE, resumeFollowRequest, ActionListener.wrap( diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index 7b588aa922788..f6bb4b75cda7e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -195,8 +195,8 @@ public void getSnapshotInfo(GetSnapshotInfoContext context) { .setMetadata(true) .setNodes(true) .get(ccrSettings.getRecoveryActionTimeout()); - 
Metadata metadata = response.getState().metadata(); - ImmutableOpenMap indicesMap = metadata.indices(); + Metadata responseMetadata = response.getState().metadata(); + ImmutableOpenMap indicesMap = responseMetadata.indices(); List indices = new ArrayList<>(indicesMap.keySet()); // fork to the snapshot meta pool because the context expects to run on it and asserts that it does @@ -206,7 +206,7 @@ public void getSnapshotInfo(GetSnapshotInfoContext context) { new SnapshotInfo( new Snapshot(this.metadata.name(), SNAPSHOT_ID), indices, - new ArrayList<>(metadata.dataStreams().keySet()), + new ArrayList<>(responseMetadata.dataStreams().keySet()), Collections.emptyList(), response.getState().getNodes().getMaxNodeVersion(), SnapshotState.SUCCESS @@ -248,12 +248,12 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna IndexMetadata.Builder imdBuilder = IndexMetadata.builder(leaderIndex); // Adding the leader index uuid for each shard as custom metadata: - Map metadata = new HashMap<>(); - metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS, String.join(",", leaderHistoryUUIDs)); - metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY, leaderIndexMetadata.getIndexUUID()); - metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY, leaderIndexMetadata.getIndex().getName()); - metadata.put(Ccr.CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY, remoteClusterAlias); - imdBuilder.putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, metadata); + Map customMetadata = new HashMap<>(); + customMetadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS, String.join(",", leaderHistoryUUIDs)); + customMetadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY, leaderIndexMetadata.getIndexUUID()); + customMetadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY, leaderIndexMetadata.getIndex().getName()); + customMetadata.put(Ccr.CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY, remoteClusterAlias); + imdBuilder.putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, customMetadata); imdBuilder.settings(leaderIndexMetadata.getSettings()); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index b188b1663994e..de1feb86440b7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -504,11 +504,11 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { } @Override - protected synchronized void recoverPrimary(IndexShard primary) { + protected synchronized void recoverPrimary(IndexShard primaryShard) { DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); Snapshot snapshot = new Snapshot("foo", new SnapshotId("bar", UUIDs.randomBase64UUID())); ShardRouting routing = ShardRoutingHelper.newWithRestoreSource( - primary.routingEntry(), + primaryShard.routingEntry(), new RecoverySource.SnapshotRecoverySource( UUIDs.randomBase64UUID(), snapshot, @@ -516,9 +516,9 @@ protected synchronized void recoverPrimary(IndexShard primary) { new IndexId("test", UUIDs.randomBase64UUID(random())) ) ); - primary.markAsRecovering("remote recovery from leader", new RecoveryState(routing, localNode, null)); + primaryShard.markAsRecovering("remote recovery from leader", new RecoveryState(routing, 
localNode, null)); final PlainActionFuture future = PlainActionFuture.newFuture(); - primary.restoreFromRepository(new RestoreOnlyRepository(index.getName()) { + primaryShard.restoreFromRepository(new RestoreOnlyRepository(index.getName()) { @Override public void restoreShard( Store store, @@ -530,11 +530,11 @@ public void restoreShard( ) { ActionListener.completeWith(listener, () -> { IndexShard leader = leaderGroup.getPrimary(); - Lucene.cleanLuceneIndex(primary.store().directory()); + Lucene.cleanLuceneIndex(primaryShard.store().directory()); try (Engine.IndexCommitRef sourceCommit = leader.acquireSafeIndexCommit()) { Store.MetadataSnapshot sourceSnapshot = leader.store().getMetadata(sourceCommit.getIndexCommit()); for (StoreFileMetadata md : sourceSnapshot) { - primary.store() + primaryShard.store() .directory() .copyFrom(leader.store().directory(), md.name(), md.name(), IOContext.DEFAULT); } diff --git a/x-pack/plugin/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/x-pack/plugin/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 608720d2bea0d..683ef7310a016 100644 --- a/x-pack/plugin/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/x-pack/plugin/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -310,16 +310,16 @@ public void testSnapshotAndRestoreAllIncludeSpecificDataStream() throws Exceptio assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); String id2 = indexResponse.getId(); - String id; + String idToGet; String dataStreamToSnapshot; String backingIndexName; if (randomBoolean()) { dataStreamToSnapshot = "ds"; - id = this.id; + idToGet = this.id; backingIndexName = this.dsBackingIndexName; } else { dataStreamToSnapshot = "other-ds"; - id = id2; + idToGet = id2; backingIndexName = this.otherDsBackingIndexName; } boolean filterDuringSnapshotting = randomBoolean(); @@ -354,7 +354,7 @@ public void testSnapshotAndRestoreAllIncludeSpecificDataStream() throws Exceptio assertEquals(1, restoreSnapshotResponse.getRestoreInfo().successfulShards()); - assertEquals(DOCUMENT_SOURCE, client.prepareGet(backingIndexName, id).get().getSourceAsMap()); + assertEquals(DOCUMENT_SOURCE, client.prepareGet(backingIndexName, idToGet).get().getSourceAsMap()); SearchHit[] hits = client.prepareSearch(backingIndexName).get().getHits().getHits(); assertEquals(1, hits.length); assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); @@ -845,7 +845,7 @@ public void testDataStreamNotIncludedInLimitedSnapshot() throws ExecutionExcepti } public void testDeleteDataStreamDuringSnapshot() throws Exception { - Client client = client(); + Client client1 = client(); // this test uses a MockRepository assertAcked(client().admin().cluster().prepareDeleteRepository(REPO)); @@ -866,7 +866,7 @@ public void testDeleteDataStreamDuringSnapshot() throws Exception { logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { - client.prepareIndex(dataStream) + client1.prepareIndex(dataStream) .setOpType(DocWriteRequest.OpType.CREATE) .setId(Integer.toString(i)) .setSource(Collections.singletonMap("@timestamp", "2020-12-12")) @@ -877,7 +877,7 @@ public void testDeleteDataStreamDuringSnapshot() throws Exception { assertDocCount(dataStream, 100L); logger.info("--> snapshot"); - ActionFuture future = client.admin() + ActionFuture future = client1.admin() .cluster() 
.prepareCreateSnapshot(repositoryName, SNAPSHOT) .setIndices(dataStream) @@ -890,7 +890,7 @@ public void testDeleteDataStreamDuringSnapshot() throws Exception { // non-partial snapshots do not allow delete operations on data streams where snapshot has not been completed try { logger.info("--> delete index while non-partial snapshot is running"); - client.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { dataStream })).actionGet(); + client1.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { dataStream })).actionGet(); fail("Expected deleting index to fail during snapshot"); } catch (SnapshotInProgressException e) { assertThat(e.getMessage(), containsString("Cannot delete data streams that are being snapshotted: [" + dataStream)); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingAppender.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingAppender.java index 5e7429efeaec1..edd9a85862b01 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingAppender.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingAppender.java @@ -71,10 +71,10 @@ public void append(LogEvent event) { /** * Sets whether this appender is enabled or disabled. When disabled, the appender will * not perform indexing operations. - * @param isEnabled the enabled status of the appender. + * @param enabled the enabled status of the appender. */ - public void setEnabled(boolean isEnabled) { - this.isEnabled = isEnabled; + public void setEnabled(boolean enabled) { + this.isEnabled = enabled; } /** From 1c623d0f46fc21cc5584343dcb810cdf2f8df5e0 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Tue, 23 Nov 2021 12:03:45 +0200 Subject: [PATCH 4/7] [ML] No need to use parent task client when internal infer delegates (#80905) In #80731 the infer trained model task was correctly given the internal infer action task as its parent task when called from there. However, it was done by both setting `Request.setParentTaskId` and using a `ParentTaskAssigningClient`. There is no need to use a parent task client. Instead, to set the parent task on the request we should use `setParentTask` rather than `setParentTaskId`, which effectively sets the target task for a `BaseTasksRequest`. The confusion of `BaseTasksRequest` holding two fields both named `parentTaskId` and having two methods that both set the parent task id will be addressed in a separate PR.
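As an illustration only, here is a sketch of the before/after call shape (not code taken from this change; the `request`, `taskId`, `client` and trailing `listener` argument are assumed from the surrounding code):

```java
// Before (sketch): the parent task id was set with the tasks-targeting setter,
// and the client had to be wrapped in a ParentTaskAssigningClient.
request.setParentTaskId(taskId);
executeAsyncWithOrigin(new ParentTaskAssigningClient(client, taskId), ML_ORIGIN,
    InferTrainedModelDeploymentAction.INSTANCE, request, listener);

// After (sketch): setParentTask records the parent directly on the request,
// so the plain client is sufficient.
request.setParentTask(taskId);
executeAsyncWithOrigin(client, ML_ORIGIN, InferTrainedModelDeploymentAction.INSTANCE, request, listener);
```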
Co-authored-by: Elastic Machine --- .../xpack/ml/action/TransportInternalInferModelAction.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index 18e66f785fdf4..83afae8777884 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; -import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.core.TimeValue; @@ -190,9 +189,9 @@ private void inferSingleDocAgainstAllocatedModel( Collections.singletonList(doc), TimeValue.MAX_VALUE ); - request.setParentTaskId(taskId); + request.setParentTask(taskId); executeAsyncWithOrigin( - new ParentTaskAssigningClient(client, taskId), + client, ML_ORIGIN, InferTrainedModelDeploymentAction.INSTANCE, request, From b9ae8fdb13a6d6ba35a74553649df6d07872b043 Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Tue, 23 Nov 2021 07:42:56 -0500 Subject: [PATCH 5/7] [DOCS] Fix elasticsearch-reset-password typo (#80919) --- docs/reference/setup/install/docker.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 00de1e5f16dc2..89f0f56f9a72a 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -103,7 +103,7 @@ For example: [source,sh] ---- -docker exec -it es-node01 /usr/share/elasticsearch/bin/reset-elastic-password +docker exec -it es-node01 /usr/share/elasticsearch/bin/elasticsearch-reset-password ---- ==== From 98279cc4dcee5fd47bede4facd93d317d627eefa Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Tue, 23 Nov 2021 14:07:31 +0000 Subject: [PATCH 6/7] Rework breaking changes for new structure (#80907) The structure of the breaking changes / migration guide was updated in #79162 to change the categories and split the breaking changes into different files. This PR amends the changelog generator code in line with this rework. 
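For orientation, a minimal, self-contained sketch (assumed, not part of this patch) of how a breaking-change area label maps to its per-area file name under the new structure; it mirrors the string transformation used in `BreakingChangesGenerator` below:

```java
import java.util.Locale;

class BreakingAreaFilenameSketch {
    public static void main(String[] args) {
        // Derive the per-area output file name from an area label (sketch only).
        String breakingArea = "Cluster and node setting";
        String outputFilename = breakingArea.toLowerCase(Locale.ROOT)
            .replaceFirst(" and", "")
            .replaceAll(" ", "-") + "-changes.asciidoc";
        System.out.println(outputFilename); // prints "cluster-node-setting-changes.asciidoc"
    }
}
```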
--- .../release/BreakingChangesGenerator.java | 122 +++++++++++++++--- .../internal/release/ChangelogEntry.java | 21 ++- .../release/GenerateReleaseNotesTask.java | 41 +++++- .../internal/release/ReleaseToolsPlugin.java | 6 +- .../src/main/resources/changelog-schema.json | 6 + .../templates/breaking-changes-area.asciidoc | 39 ++++++ .../templates/breaking-changes.asciidoc | 39 +----- .../release/BreakingChangesGeneratorTest.java | 52 +++++++- .../release/GenerateReleaseNotesTaskTest.java | 2 - ...gesGeneratorTest.generateAreaFile.asciidoc | 33 +++++ ...sGeneratorTest.generateIndexFile.asciidoc} | 53 +------- 11 files changed, 293 insertions(+), 121 deletions(-) create mode 100644 build-tools-internal/src/main/resources/templates/breaking-changes-area.asciidoc create mode 100644 build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateAreaFile.asciidoc rename build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/{BreakingChangesGeneratorTest.generateFile.asciidoc => BreakingChangesGeneratorTest.generateIndexFile.asciidoc} (54%) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java index fc33c288cf944..286f23d83e5bb 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/BreakingChangesGenerator.java @@ -11,6 +11,7 @@ import com.google.common.annotations.VisibleForTesting; import org.elasticsearch.gradle.VersionProperties; +import org.gradle.api.GradleException; import java.io.File; import java.io.FileWriter; @@ -18,49 +19,110 @@ import java.nio.file.Files; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.TreeMap; +import java.util.TreeSet; import java.util.stream.Collectors; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.toCollection; /** - * Generates the page that lists the breaking changes and deprecations for a minor version release. + * Generates the page that contains an index into the breaking changes and lists deprecations for a minor version release, + * and the individual pages for each breaking area. 
*/ public class BreakingChangesGenerator { - static void update(File templateFile, File outputFile, List entries) throws IOException { - try (FileWriter output = new FileWriter(outputFile)) { + // Needs to match `changelog-schema.json` + private static final List BREAKING_AREAS = List.of( + "Cluster and node setting", + "Command line tool", + "Index setting", + "JVM option", + "Java API", + "Logging", + "Mapping", + "Packaging", + "Painless", + "REST API", + "System requirement", + "Transform" + ); + + static void update( + File indexTemplateFile, + File indexOutputFile, + File outputDirectory, + File areaTemplateFile, + List entries + ) throws IOException { + if (outputDirectory.exists()) { + if (outputDirectory.isDirectory() == false) { + throw new GradleException("Path [" + outputDirectory + "] exists but isn't a directory!"); + } + } else { + Files.createDirectory(outputDirectory.toPath()); + } + + try (FileWriter output = new FileWriter(indexOutputFile)) { output.write( - generateFile(QualifiedVersion.of(VersionProperties.getElasticsearch()), Files.readString(templateFile.toPath()), entries) + generateIndexFile( + QualifiedVersion.of(VersionProperties.getElasticsearch()), + Files.readString(indexTemplateFile.toPath()), + entries + ) ); } - } - @VisibleForTesting - static String generateFile(QualifiedVersion version, String template, List entries) throws IOException { + String areaTemplate = Files.readString(areaTemplateFile.toPath()); - final Map>> breakingChangesByNotabilityByArea = entries.stream() - .map(ChangelogEntry::getBreaking) - .filter(Objects::nonNull) - .sorted(comparing(ChangelogEntry.Breaking::getTitle)) - .collect( - groupingBy( - ChangelogEntry.Breaking::isNotable, - groupingBy(ChangelogEntry.Breaking::getArea, TreeMap::new, Collectors.toList()) - ) - ); + for (String breakingArea : BREAKING_AREAS) { + final List entriesForArea = entries.stream() + .map(ChangelogEntry::getBreaking) + .filter(entry -> entry != null && breakingArea.equals(entry.getArea())) + .collect(Collectors.toList()); + + if (entriesForArea.isEmpty()) { + continue; + } + + final String outputFilename = breakingArea.toLowerCase(Locale.ROOT).replaceFirst(" and", "").replaceAll(" ", "-") + + "-changes.asciidoc"; + + try (FileWriter output = new FileWriter(outputDirectory.toPath().resolve(outputFilename).toFile())) { + output.write( + generateBreakingAreaFile( + QualifiedVersion.of(VersionProperties.getElasticsearch()), + areaTemplate, + breakingArea, + entriesForArea + ) + ); + } + } + } + @VisibleForTesting + static String generateIndexFile(QualifiedVersion version, String template, List entries) throws IOException { final Map> deprecationsByArea = entries.stream() .map(ChangelogEntry::getDeprecation) .filter(Objects::nonNull) .sorted(comparing(ChangelogEntry.Deprecation::getTitle)) .collect(groupingBy(ChangelogEntry.Deprecation::getArea, TreeMap::new, Collectors.toList())); + final List breakingIncludeList = entries.stream() + .filter(each -> each.getBreaking() != null) + .map(each -> each.getBreaking().getArea().toLowerCase(Locale.ROOT).replaceFirst(" and", "").replaceAll(" ", "-")) + .distinct() + .sorted() + .toList(); + final Map bindings = new HashMap<>(); - bindings.put("breakingChangesByNotabilityByArea", breakingChangesByNotabilityByArea); + bindings.put("breakingIncludeList", breakingIncludeList); bindings.put("deprecationsByArea", deprecationsByArea); bindings.put("isElasticsearchSnapshot", version.isSnapshot()); bindings.put("majorDotMinor", version.getMajor() + "." 
+ version.getMinor()); @@ -70,4 +132,28 @@ static String generateFile(QualifiedVersion version, String template, List entriesForArea + ) throws IOException { + final Map> breakingEntriesByNotability = entriesForArea.stream() + .collect( + groupingBy( + ChangelogEntry.Breaking::isNotable, + toCollection(() -> new TreeSet<>(comparing(ChangelogEntry.Breaking::getTitle))) + ) + ); + + final Map bindings = new HashMap<>(); + bindings.put("breakingArea", breakingArea); + bindings.put("breakingEntriesByNotability", breakingEntriesByNotability); + bindings.put("breakingAreaAnchor", breakingArea.toLowerCase(Locale.ROOT).replaceFirst(" and", "").replaceAll(" ", "_")); + bindings.put("majorMinor", String.valueOf(version.getMajor()) + version.getMinor()); + + return TemplateUtils.render(template, bindings); + } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java index 19b9ed2f274a4..94c77768b14b0 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java @@ -215,6 +215,7 @@ public static class Breaking { private String details; private String impact; private boolean notable; + private boolean essSettingChange; public String getArea() { return area; @@ -260,6 +261,14 @@ public String getAnchor() { return generatedAnchor(this.title); } + public boolean isEssSettingChange() { + return essSettingChange; + } + + public void setEssSettingChange(boolean essSettingChange) { + this.essSettingChange = essSettingChange; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -273,23 +282,25 @@ public boolean equals(Object o) { && Objects.equals(area, breaking.area) && Objects.equals(title, breaking.title) && Objects.equals(details, breaking.details) - && Objects.equals(impact, breaking.impact); + && Objects.equals(impact, breaking.impact) + && Objects.equals(essSettingChange, breaking.essSettingChange); } @Override public int hashCode() { - return Objects.hash(area, title, details, impact, notable); + return Objects.hash(area, title, details, impact, notable, essSettingChange); } @Override public String toString() { return String.format( - "Breaking{area='%s', title='%s', details='%s', impact='%s', isNotable=%s}", + "Breaking{area='%s', title='%s', details='%s', impact='%s', notable=%s, essSettingChange=%s}", area, title, details, impact, - notable + notable, + essSettingChange ); } } @@ -351,7 +362,7 @@ public String toString() { } private static String generatedAnchor(String input) { - final List excludes = List.of("the", "is", "a"); + final List excludes = List.of("the", "is", "a", "and"); final String[] words = input.toLowerCase(Locale.ROOT) .replaceAll("[^\\w]+", "_") diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java index 70fafc303bcd3..7f09dbb87d3f0 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java @@ -14,6 +14,8 @@ import org.gradle.api.DefaultTask; import org.gradle.api.GradleException; import 
org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.Directory; +import org.gradle.api.file.DirectoryProperty; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; import org.gradle.api.file.RegularFileProperty; @@ -22,6 +24,7 @@ import org.gradle.api.model.ObjectFactory; import org.gradle.api.tasks.InputFile; import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputDirectory; import org.gradle.api.tasks.OutputFile; import org.gradle.api.tasks.TaskAction; import org.gradle.process.ExecOperations; @@ -55,11 +58,13 @@ public class GenerateReleaseNotesTask extends DefaultTask { private final RegularFileProperty releaseNotesTemplate; private final RegularFileProperty releaseHighlightsTemplate; private final RegularFileProperty breakingChangesTemplate; + private final RegularFileProperty breakingChangesAreaTemplate; private final RegularFileProperty releaseNotesIndexFile; private final RegularFileProperty releaseNotesFile; private final RegularFileProperty releaseHighlightsFile; - private final RegularFileProperty breakingChangesFile; + private final RegularFileProperty breakingChangesIndexFile; + private final DirectoryProperty breakingChangesDirectory; private final GitWrapper gitWrapper; @@ -71,11 +76,13 @@ public GenerateReleaseNotesTask(ObjectFactory objectFactory, ExecOperations exec releaseNotesTemplate = objectFactory.fileProperty(); releaseHighlightsTemplate = objectFactory.fileProperty(); breakingChangesTemplate = objectFactory.fileProperty(); + breakingChangesAreaTemplate = objectFactory.fileProperty(); releaseNotesIndexFile = objectFactory.fileProperty(); releaseNotesFile = objectFactory.fileProperty(); releaseHighlightsFile = objectFactory.fileProperty(); - breakingChangesFile = objectFactory.fileProperty(); + breakingChangesIndexFile = objectFactory.fileProperty(); + breakingChangesDirectory = objectFactory.directoryProperty(); gitWrapper = new GitWrapper(execOperations); } @@ -129,7 +136,9 @@ public void executeTask() throws IOException { LOGGER.info("Generating breaking changes / deprecations notes..."); BreakingChangesGenerator.update( this.breakingChangesTemplate.get().getAsFile(), - this.breakingChangesFile.get().getAsFile(), + this.breakingChangesIndexFile.get().getAsFile(), + this.breakingChangesDirectory.get().getAsFile(), + this.breakingChangesAreaTemplate.get().getAsFile(), entries ); } @@ -339,11 +348,29 @@ public void setReleaseHighlightsFile(RegularFile file) { } @OutputFile - public RegularFileProperty getBreakingChangesFile() { - return breakingChangesFile; + public RegularFileProperty getBreakingChangesIndexFile() { + return breakingChangesIndexFile; } - public void setBreakingChangesFile(RegularFile file) { - this.breakingChangesFile.set(file); + public void setBreakingChangesIndexFile(RegularFile file) { + this.breakingChangesIndexFile.set(file); + } + + public void setBreakingChangesDirectory(Directory breakingChangesDirectory) { + this.breakingChangesDirectory.set(breakingChangesDirectory); + } + + @OutputDirectory + public DirectoryProperty getBreakingChangesDirectory() { + return breakingChangesDirectory; + } + + @InputFile + public RegularFileProperty getBreakingChangesAreaTemplate() { + return breakingChangesAreaTemplate; + } + + public void setBreakingChangesAreaTemplate(RegularFile file) { + this.breakingChangesAreaTemplate.set(file); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java index 8f08da371ec4b..97b0b46365bda 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java @@ -84,11 +84,15 @@ public void apply(Project project) { task.setReleaseHighlightsFile(projectDirectory.file("docs/reference/release-notes/highlights.asciidoc")); task.setBreakingChangesTemplate(projectDirectory.file(RESOURCES + "templates/breaking-changes.asciidoc")); - task.setBreakingChangesFile( + task.setBreakingChangesIndexFile( projectDirectory.file( String.format("docs/reference/migration/migrate_%d_%d.asciidoc", version.getMajor(), version.getMinor()) ) ); + task.setBreakingChangesAreaTemplate(projectDirectory.file(RESOURCES + "templates/breaking-changes-area.asciidoc")); + task.setBreakingChangesDirectory( + projectDirectory.dir(String.format("docs/reference/migration/migrate_%d_%d", version.getMajor(), version.getMinor())) + ); task.dependsOn(validateChangelogsTask); }); diff --git a/build-tools-internal/src/main/resources/changelog-schema.json b/build-tools-internal/src/main/resources/changelog-schema.json index 7eb80babe3c15..e96e014fa19e3 100644 --- a/build-tools-internal/src/main/resources/changelog-schema.json +++ b/build-tools-internal/src/main/resources/changelog-schema.json @@ -157,6 +157,9 @@ }, "notable": { "type": "boolean" + }, + "ess_setting_change": { + "type": "boolean" } }, "required": [ @@ -179,6 +182,9 @@ "body": { "type": "string", "minLength": 1 + }, + "ess_setting_change": { + "type": "boolean" } }, "required": [ diff --git a/build-tools-internal/src/main/resources/templates/breaking-changes-area.asciidoc b/build-tools-internal/src/main/resources/templates/breaking-changes-area.asciidoc new file mode 100644 index 0000000000000..43d6d376bbbbe --- /dev/null +++ b/build-tools-internal/src/main/resources/templates/breaking-changes-area.asciidoc @@ -0,0 +1,39 @@ +[discrete] +[[breaking_${majorMinor}_${breakingAreaAnchor}]] +==== ${breakingArea} + +//NOTE: The notable-breaking-changes tagged regions are re-used in the +//Installation and Upgrade Guide + +TIP: {ess-setting-change} + +<% +[true, false].each { isNotable -> + def breakingChanges = breakingEntriesByNotability.getOrDefault(isNotable, []) + + if (breakingChanges.isEmpty() == false) { + if (isNotable) { + /* No newline here, one will be added below */ + print "// tag::notable-breaking-changes[]" + } + + for (breaking in breakingChanges) { %> +[[${ breaking.anchor }]] +. ${breaking.title}${ breaking.essSettingChange ? ' {ess-icon}' : '' } +[%collapsible] +==== +*Details* + +${breaking.details.trim()} + +*Impact* + +${breaking.impact.trim()} +==== +<% + } + + if (isNotable) { + print "// end::notable-breaking-changes[]\n" + } + } +} +%> diff --git a/build-tools-internal/src/main/resources/templates/breaking-changes.asciidoc b/build-tools-internal/src/main/resources/templates/breaking-changes.asciidoc index 38573747863e9..dc240761a5714 100644 --- a/build-tools-internal/src/main/resources/templates/breaking-changes.asciidoc +++ b/build-tools-internal/src/main/resources/templates/breaking-changes.asciidoc @@ -9,11 +9,11 @@ your application to {es} ${majorDotMinor}. See also <> and <>. 
<% if (isElasticsearchSnapshot) { %> -coming[${version}] +coming::[${version}] <% } %> //NOTE: The notable-breaking-changes tagged regions are re-used in the //Installation and Upgrade Guide -<% if (breakingChangesByNotabilityByArea.isEmpty() == false) { %> +<% if (breakingIncludeList.isEmpty() == false) { %> [discrete] [[breaking-changes-${majorDotMinor}]] === Breaking changes @@ -29,41 +29,14 @@ Significant changes in behavior are deprecated in a minor release and the old behavior is supported until the next major release. To find out if you are using any deprecated functionality, enable <>. -<% -[true, false].each { isNotable -> - def breakingChangesByArea = breakingChangesByNotabilityByArea.getOrDefault(isNotable, []) - - breakingChangesByArea.eachWithIndex { area, breakingChanges, i -> - print "\n" - - if (isNotable) { - print "// tag::notable-breaking-changes[]\n" - } - print "[discrete]\n" - print "[[breaking_${majorMinor}_${ area.toLowerCase().replaceAll("[^a-z0-9]+", "_") }]]\n" - print "==== ${area}\n" - - for (breaking in breakingChanges) { %> -[[${ breaking.anchor }]] -.${breaking.title} -[%collapsible] -==== -*Details* + -${breaking.details.trim()} - -*Impact* + -${breaking.impact.trim()} -==== <% - } + for (include in breakingIncludeList) { + print "include::migrate_${version.major}_${version.minor}/${include}.asciidoc[]\n"; + } - if (isNotable) { - print "// end::notable-breaking-changes[]\n" - } - } -} } + if (deprecationsByArea.empty == false) { %> [discrete] diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java index 601d3b8ed4870..7d05fbb82a328 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java @@ -16,8 +16,8 @@ import java.util.List; import java.util.Objects; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; public class BreakingChangesGeneratorTest { @@ -25,17 +25,46 @@ public class BreakingChangesGeneratorTest { * Check that the breaking changes can be correctly generated. */ @Test - public void generateFile_rendersCorrectMarkup() throws Exception { + public void generateIndexFile_rendersCorrectMarkup() throws Exception { // given: final String template = getResource("/templates/breaking-changes.asciidoc"); final String expectedOutput = getResource( - "/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc" + "/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateIndexFile.asciidoc" ); final List entries = getEntries(); // when: - final String actualOutput = BreakingChangesGenerator.generateFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries); + final String actualOutput = BreakingChangesGenerator.generateIndexFile(QualifiedVersion.of("8.4.0-SNAPSHOT"), template, entries); + + // then: + assertThat(actualOutput, equalTo(expectedOutput)); + } + + /** + * Check that the breaking changes for a specific area can be correctly generated. 
+ */ + @Test + public void generateAreaFile_rendersCorrectMarkup() throws Exception { + // given: + final String template = getResource("/templates/breaking-changes-area.asciidoc"); + final String expectedOutput = getResource( + "/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateAreaFile.asciidoc" + ); + final String breakingArea = "Cluster and node setting"; + + final List entries = getEntries().stream() + .map(ChangelogEntry::getBreaking) + .filter(each -> each.getArea().equals(breakingArea)) + .toList(); + + // when: + final String actualOutput = BreakingChangesGenerator.generateBreakingAreaFile( + QualifiedVersion.of("8.4.0-SNAPSHOT"), + template, + breakingArea, + entries + ); // then: assertThat(actualOutput, equalTo(expectedOutput)); @@ -58,7 +87,7 @@ private List getEntries() { breaking2.setNotable(true); breaking2.setTitle("Breaking change number 2"); - breaking2.setArea("Cluster"); + breaking2.setArea("Cluster and node setting"); breaking2.setDetails("Breaking change details 2"); breaking2.setImpact("Breaking change impact description 2"); @@ -72,7 +101,18 @@ private List getEntries() { breaking3.setDetails("Breaking change details 3"); breaking3.setImpact("Breaking change impact description 3"); - return List.of(entry1, entry2, entry3); + ChangelogEntry entry4 = new ChangelogEntry(); + ChangelogEntry.Breaking breaking4 = new ChangelogEntry.Breaking(); + entry4.setBreaking(breaking4); + + breaking4.setNotable(true); + breaking4.setTitle("Breaking change number 4"); + breaking4.setArea("Cluster and node setting"); + breaking4.setDetails("Breaking change details 4"); + breaking4.setImpact("Breaking change impact description 4"); + breaking4.setEssSettingChange(true); + + return List.of(entry1, entry2, entry3, entry4); } private String getResource(String name) throws Exception { diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java index 8f35997c1e7d5..d2deffdbf332f 100644 --- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTaskTest.java @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.internal.test.GradleUnitTestCase; import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; import java.io.File; @@ -34,7 +33,6 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; -@Ignore("https://github.com/elastic/elasticsearch/issues/77190") public class GenerateReleaseNotesTaskTest extends GradleUnitTestCase { private GitWrapper gitWrapper; diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateAreaFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateAreaFile.asciidoc new file mode 100644 index 0000000000000..dcd4d646d5a6a --- /dev/null +++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateAreaFile.asciidoc @@ -0,0 +1,33 @@ +[discrete] +[[breaking_84_cluster_node_setting]] +==== Cluster and node setting + +//NOTE: The notable-breaking-changes tagged regions are re-used in the +//Installation and Upgrade Guide + +TIP: 
{ess-setting-change} + +// tag::notable-breaking-changes[] +[[breaking_change_number_2]] +. Breaking change number 2 +[%collapsible] +==== +*Details* + +Breaking change details 2 + +*Impact* + +Breaking change impact description 2 +==== + +[[breaking_change_number_4]] +. Breaking change number 4 {ess-icon} +[%collapsible] +==== +*Details* + +Breaking change details 4 + +*Impact* + +Breaking change impact description 4 +==== +// end::notable-breaking-changes[] + diff --git a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateIndexFile.asciidoc similarity index 54% rename from build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc rename to build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateIndexFile.asciidoc index 4a61c2de4016f..277833e0171be 100644 --- a/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateFile.asciidoc +++ b/build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.generateIndexFile.asciidoc @@ -9,7 +9,7 @@ your application to {es} 8.4. See also <> and <>. -coming[8.4.0-SNAPSHOT] +coming::[8.4.0-SNAPSHOT] //NOTE: The notable-breaking-changes tagged regions are re-used in the //Installation and Upgrade Guide @@ -30,52 +30,7 @@ the old behavior is supported until the next major release. To find out if you are using any deprecated functionality, enable <>. -// tag::notable-breaking-changes[] -[discrete] -[[breaking_84_api]] -==== API - -[[breaking_change_number_1]] -.Breaking change number 1 -[%collapsible] -==== -*Details* + -Breaking change details 1 - -*Impact* + -Breaking change impact description 1 -==== -// end::notable-breaking-changes[] - -// tag::notable-breaking-changes[] -[discrete] -[[breaking_84_cluster]] -==== Cluster - -[[breaking_change_number_2]] -.Breaking change number 2 -[%collapsible] -==== -*Details* + -Breaking change details 2 - -*Impact* + -Breaking change impact description 2 -==== -// end::notable-breaking-changes[] - -[discrete] -[[breaking_84_transform]] -==== Transform - -[[breaking_change_number_3]] -.Breaking change number 3 -[%collapsible] -==== -*Details* + -Breaking change details 3 - -*Impact* + -Breaking change impact description 3 -==== +include::migrate_8_4/api.asciidoc[] +include::migrate_8_4/cluster-node-setting.asciidoc[] +include::migrate_8_4/transform.asciidoc[] From d2217ebaa36ab47c4319408e7e262408da45feb7 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 23 Nov 2021 15:14:13 +0100 Subject: [PATCH 7/7] Cleanup SLM History Item .equals (#80938) There was some confusing dead code here and the field comparisons were done in a needlessly confusing manner also. 
--- .../slm/history/SnapshotHistoryItem.java | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java index 38c0225668538..fd24e697818b5 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryItem.java @@ -243,19 +243,14 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - boolean result; - if (this == o) result = true; - if (o == null || getClass() != o.getClass()) result = false; - SnapshotHistoryItem that1 = (SnapshotHistoryItem) o; - result = isSuccess() == that1.isSuccess() - && timestamp == that1.getTimestamp() - && Objects.equals(getPolicyId(), that1.getPolicyId()) - && Objects.equals(getRepository(), that1.getRepository()) - && Objects.equals(getSnapshotName(), that1.getSnapshotName()) - && Objects.equals(getOperation(), that1.getOperation()); - if (result == false) return false; SnapshotHistoryItem that = (SnapshotHistoryItem) o; - return Objects.equals(getSnapshotConfiguration(), that.getSnapshotConfiguration()) + return isSuccess() == that.isSuccess() + && timestamp == that.getTimestamp() + && Objects.equals(getPolicyId(), that.getPolicyId()) + && Objects.equals(getRepository(), that.getRepository()) + && Objects.equals(getSnapshotName(), that.getSnapshotName()) + && Objects.equals(getOperation(), that.getOperation()) + && Objects.equals(getSnapshotConfiguration(), that.getSnapshotConfiguration()) && Objects.equals(getErrorDetails(), that.getErrorDetails()); }