From f2ac26f385490eda9c15616498f33e1b0713b48a Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Thu, 14 Oct 2021 09:00:52 +0200
Subject: [PATCH] Filter original indices in shard level request (#78508)

Today the search action sends the full list of original indices on every
shard request. This change restricts the list to the concrete index of the
shard, or the alias that was used to resolve it.

Relates #78314
---
 .../aggregations/TermsReduceBenchmark.java     |   5 +-
 .../client/core/CountResponseTests.java        |   2 +-
 .../DiscountedCumulativeGainTests.java         |   7 +-
 .../index/rankeval/EvalQueryQualityTests.java  |   3 +-
 .../rankeval/ExpectedReciprocalRankTests.java  |   3 +-
 .../rankeval/MeanReciprocalRankTests.java      |   3 +-
 .../index/rankeval/PrecisionAtKTests.java      |   7 +-
 .../index/rankeval/RankEvalResponseTests.java  |   5 +-
 .../index/rankeval/RecallAtKTests.java         |   5 +-
 .../action/IndicesRequestIT.java               |  24 ++-
 .../TransportClusterSearchShardsAction.java    |   4 +-
 .../search/AbstractSearchAsyncAction.java      |  38 ++--
 .../search/CanMatchPreFilterSearchPhase.java   |   3 +-
 .../action/search/DfsQueryPhase.java           |  14 +-
 .../action/search/FetchSearchPhase.java        |  20 +-
 .../action/search/SearchPhaseContext.java      |   5 +
 .../search/SearchScrollAsyncAction.java        |   2 +-
 .../action/search/SearchShardIterator.java     |   2 +-
 .../action/search/ShardSearchFailure.java      |   3 +-
 .../TransportOpenPointInTimeAction.java        |   6 +-
 .../action/search/TransportSearchAction.java   |  66 +++++-
 .../cluster/metadata/DataStreamAlias.java      |   4 +
 .../metadata/IndexNameExpressionResolver.java  |  13 +-
 .../elasticsearch/indices/IndicesService.java  |   2 +-
 .../org/elasticsearch/search/SearchHit.java    |   3 +-
 .../elasticsearch/search/SearchService.java    |   3 +-
 .../search/SearchShardTarget.java              |  12 +-
 .../ElasticsearchExceptionTests.java           |  37 ++--
 .../ExceptionSerializationTests.java           |   3 +-
 .../elasticsearch/ExceptionsHelperTests.java   |   3 +-
 .../AbstractSearchAsyncActionTests.java        |  11 +-
 .../search/ClearScrollControllerTests.java     |  12 +-
 .../action/search/CountedCollectorTests.java   |   5 +-
 .../action/search/DfsQueryPhaseTests.java      |  21 +-
 .../action/search/FetchSearchPhaseTests.java   |  21 +-
 .../action/search/MockSearchPhaseContext.java  |   5 +
 .../search/QueryPhaseResultConsumerTests.java  |   3 +-
 .../search/SearchPhaseControllerTests.java     |  33 ++-
 .../SearchPhaseExecutionExceptionTests.java    |  11 +-
 .../SearchQueryThenFetchAsyncActionTests.java  |   8 +-
 .../search/SearchResponseMergerTests.java      |   9 +-
 .../search/SearchScrollAsyncActionTests.java   |   9 +-
 .../search/SearchShardIteratorTests.java       |   1 -
 .../search/ShardSearchFailureTests.java        |   7 +-
 .../search/TransportSearchActionTests.java     |   5 +-
 .../search/TransportSearchHelperTests.java     |   6 +-
 .../IndexNameExpressionResolverTests.java      |  34 +--
 .../rest/BytesRestResponseTests.java           |   5 +-
 .../rest/action/RestActionsTests.java          |   3 +-
 .../search/DefaultSearchContextTests.java      |   5 +-
 .../elasticsearch/search/SearchHitTests.java   |   5 +-
 .../elasticsearch/search/SearchHitsTests.java  |   3 +-
 .../SearchProfileResultsBuilderTests.java      |   2 +-
 .../search/query/QuerySearchResultTests.java   |   3 +-
 .../elasticsearch/test/TestSearchContext.java  |   3 +-
 .../xpack/search/AsyncSearchTaskTests.java     |   5 +-
 x-pack/plugin/security/build.gradle            |   1 +
 ...onsWithAliasesWildcardsAndRegexsTests.java  | 195 +++++++++++++++++-
 .../SeqNoPrimaryTermAndIndexTests.java         |   2 +-
 .../CompareConditionSearchTests.java           |   3 +-
 .../xpack/watcher/WatcherServiceTests.java     |   3 +-
 .../execution/TriggeredWatchStoreTests.java    |   5 +-
 62 files changed, 496 insertions(+), 255
deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java index 64ad19a9a96e1..7b53bb55f51bd 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.QueryPhaseResultConsumer; import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchProgressListener; @@ -183,9 +182,7 @@ public SearchPhaseController.ReducedQueryPhase reduceAggs(TermsList candidateLis new DocValueFormat[] { DocValueFormat.RAW } ); result.aggregations(candidateList.get(i)); - result.setSearchShardTarget( - new SearchShardTarget("node", new ShardId(new Index("index", "index"), i), null, OriginalIndices.NONE) - ); + result.setSearchShardTarget(new SearchShardTarget("node", new ShardId(new Index("index", "index"), i), null)); shards.add(result); } SearchRequest request = new SearchRequest(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java index 7f2500656d1cd..46eede441529a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java @@ -80,7 +80,7 @@ private static ShardSearchFailure createShardFailureTestItem() { String nodeId = randomAlphaOfLengthBetween(5, 10); String indexName = randomAlphaOfLengthBetween(5, 10); searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null, null); + new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null); } return new ShardSearchFailure(ex, searchShardTarget); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index 73d69ea47a3b7..2b7a6df40a3af 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -62,7 +61,7 @@ public void testDCGAt() { for (int i = 0; i < 6; i++) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); 
assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA); @@ -112,7 +111,7 @@ public void testDCGAtSixMissingRatings() { } } hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, rated); @@ -169,7 +168,7 @@ public void testDCGAtFourMoreRatings() { SearchHit[] hits = new SearchHit[4]; for (int i = 0; i < 4; i++) { hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java index d0d27822c9332..adaa984b4a817 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -46,7 +45,7 @@ public static EvalQueryQuality randomEvalQueryQuality() { for (int i = 0; i < numberOfSearchHits; i++) { RatedSearchHit ratedSearchHit = RatedSearchHitTests.randomRatedSearchHit(); // we need to associate each hit with an index name otherwise rendering will not work - ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null, OriginalIndices.NONE)); + ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null)); ratedHits.add(ratedSearchHit); } EvalQueryQuality evalQueryQuality = new EvalQueryQuality(randomAlphaOfLength(10), diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java index cc43c9ae2f9d6..1447419366c8b 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -105,7 +104,7 @@ private SearchHit[] createSearchHits(List rated, Integer[] releva rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new 
ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } return hits; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index 8bb61ccaa62d6..090041b80d56c 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -193,7 +192,7 @@ private static SearchHit[] createSearchHits(int from, int to, String index) { SearchHit[] hits = new SearchHit[to + 1 - from]; for (int i = from; i <= to; i++) { hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); } return hits; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index 91010256808dd..d86b1b89d4400 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -102,7 +101,7 @@ public void testIgnoreUnlabeled() { // add an unlabeled search hit SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3); searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap()); - searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); assertEquals((double) 2 / 3, evaluated.metricScore(), 0.00001); @@ -121,7 +120,7 @@ public void testNoRatedDocs() throws Exception { SearchHit[] hits = new SearchHit[5]; for (int i = 0; i < 5; i++) { hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); assertEquals(0.0d, evaluated.metricScore(), 0.00001); @@ -243,7 +242,7 @@ private static SearchHit[] toSearchHits(List rated, String index) SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new 
SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); } return hits; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 4b3e1190fd50e..d8b61ad190ef6 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -58,7 +57,7 @@ public class RankEvalResponseTests extends ESTestCase { new IllegalArgumentException("Closed resource", new RuntimeException("Resource")), new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] { new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)) }), + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)) }), new ElasticsearchException("Parsing failed", new ParsingException(9, 42, "Wrong state", new NullPointerException("Unexpected null value"))) }; @@ -169,7 +168,7 @@ public void testToXContent() throws IOException { private static RatedSearchHit searchHit(String index, int docId, Integer rating) { SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap()); - hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); hit.score(1.0f); return new RatedSearchHit(hit, rating != null ? 
OptionalInt.of(rating) : OptionalInt.empty()); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java index ec391438cd630..a129002b7d450 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -104,7 +103,7 @@ public void testNoRatedDocs() throws Exception { SearchHit[] hits = new SearchHit[k]; for (int i = 0; i < k; i++) { hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } EvalQueryQuality evaluated = (new RecallAtK()).evaluate("id", hits, Collections.emptyList()); @@ -226,7 +225,7 @@ private static SearchHit[] toSearchHits(List rated, String index) SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); } return hits; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 8660f8c1ebb47..99e2130aa0a23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -555,9 +555,10 @@ public void testSearchQueryThenFetch() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); clearInterceptedActions(); - assertSameIndices(searchRequest, SearchTransportService.QUERY_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME); + assertIndicesSubset(Arrays.asList(searchRequest.indices()), SearchTransportService.QUERY_ACTION_NAME, + SearchTransportService.FETCH_ID_ACTION_NAME); //free context messages are not necessarily sent, but if they are, check their indices - assertSameIndicesOptionalRequests(searchRequest, SearchTransportService.FREE_CONTEXT_ACTION_NAME); + assertIndicesSubsetOptionalRequests(Arrays.asList(searchRequest.indices()), SearchTransportService.FREE_CONTEXT_ACTION_NAME); } public void testSearchDfsQueryThenFetch() throws Exception { @@ -576,10 +577,10 @@ public void testSearchDfsQueryThenFetch() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); clearInterceptedActions(); - assertSameIndices(searchRequest, SearchTransportService.DFS_ACTION_NAME, SearchTransportService.QUERY_ID_ACTION_NAME, - SearchTransportService.FETCH_ID_ACTION_NAME); + assertIndicesSubset(Arrays.asList(searchRequest.indices()), SearchTransportService.DFS_ACTION_NAME, + SearchTransportService.QUERY_ID_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME); //free context messages are not 
necessarily sent, but if they are, check their indices - assertSameIndicesOptionalRequests(searchRequest, SearchTransportService.FREE_CONTEXT_ACTION_NAME); + assertIndicesSubsetOptionalRequests(Arrays.asList(searchRequest.indices()), SearchTransportService.FREE_CONTEXT_ACTION_NAME); } private static void assertSameIndices(IndicesRequest originalRequest, String... actions) { @@ -603,11 +604,22 @@ private static void assertSameIndices(IndicesRequest originalRequest, boolean op } } } + private static void assertIndicesSubset(List indices, String... actions) { + assertIndicesSubset(indices, false, actions); + } + + private static void assertIndicesSubsetOptionalRequests(List indices, String... actions) { + assertIndicesSubset(indices, true, actions); + } + + private static void assertIndicesSubset(List indices, boolean optional, String... actions) { //indices returned by each bulk shard request need to be a subset of the original indices for (String action : actions) { List requests = consumeTransportRequests(action); - assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0)); + if (optional == false) { + assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0)); + } for (TransportRequest internalRequest : requests) { IndicesRequest indicesRequest = convertRequest(internalRequest); for (String index : indicesRequest.indices()) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index cd751fc9c53a5..756046843cec4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -63,8 +63,8 @@ protected void masterOperation(Task task, final ClusterSearchShardsRequest reque Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); - final String[] aliases = indexNameExpressionResolver.indexAliases(clusterState, index, aliasMetadata -> true, true, - indicesAndAliases); + final String[] aliases = indexNameExpressionResolver.indexAliases(clusterState, index, + aliasMetadata -> true, dataStreamAlias -> true, true, indicesAndAliases); indicesAndFilters.put(index, new AliasFilter(aliasFilter.getQueryBuilder(), aliases)); } diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index b2d1d64c76a16..8decbd3836f23 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.action.support.TransportActions; @@ -41,6 +42,7 @@ import 
java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -66,6 +68,7 @@ abstract class AbstractSearchAsyncAction exten private final Executor executor; private final ActionListener listener; private final SearchRequest request; + /** * Used by subclasses to resolve node ids to DiscoveryNodes. **/ @@ -85,7 +88,8 @@ abstract class AbstractSearchAsyncAction exten protected final GroupShardsIterator toSkipShardsIts; protected final GroupShardsIterator shardsIts; - private final Map shardItIndexMap; + private final SearchShardIterator[] shardIterators; + private final Map shardIndexMap; private final int expectedTotalOps; private final AtomicInteger totalOps = new AtomicInteger(); private final int maxConcurrentRequestsPerNode; @@ -115,16 +119,18 @@ abstract class AbstractSearchAsyncAction exten } this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators); this.shardsIts = new GroupShardsIterator<>(iterators); - this.shardItIndexMap = new HashMap<>(); // we compute the shard index based on the natural order of the shards // that participate in the search request. This means that this number is // consistent between two requests that target the same shards. - List naturalOrder = new ArrayList<>(iterators); - CollectionUtil.timSort(naturalOrder); - for (int i = 0; i < naturalOrder.size(); i++) { - shardItIndexMap.put(naturalOrder.get(i), i); + Map shardMap = new HashMap<>(); + List searchIterators = new ArrayList<>(iterators); + CollectionUtil.timSort(searchIterators); + for (int i = 0; i < searchIterators.size(); i++) { + shardMap.put(searchIterators.get(i), i); } + this.shardIndexMap = Collections.unmodifiableMap(shardMap); + this.shardIterators = searchIterators.toArray(SearchShardIterator[]::new); // we need to add 1 for non active partition, since we count it in the total. This means for each shard in the iterator we sum up // it's number of active shards but use 1 as the default if no replica of a shard is active at this point. @@ -221,8 +227,8 @@ public final void run() { for (int i = 0; i < shardsIts.size(); i++) { final SearchShardIterator shardRoutings = shardsIts.get(i); assert shardRoutings.skip() == false; - assert shardItIndexMap.containsKey(shardRoutings); - int shardIndex = shardItIndexMap.get(shardRoutings); + assert shardIndexMap.containsKey(shardRoutings); + int shardIndex = shardIndexMap.get(shardRoutings); performPhaseOnShard(shardIndex, shardRoutings, shardRoutings.nextOrNull()); } } @@ -287,8 +293,7 @@ protected void performPhaseOnShard(final int shardIndex, final SearchShardIterat */ if (shard == null) { assert assertExecuteOnStartThread(); - SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), - shardIt.getClusterAlias(), shardIt.getOriginalIndices()); + SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias()); onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId())); } else { final PendingExecutions pendingExecutions = throttleConcurrentRequests ? 
@@ -608,6 +613,11 @@ public final SearchRequest getRequest() { return request; } + @Override + public OriginalIndices getOriginalIndices(int shardIndex) { + return shardIterators[shardIndex].getOriginalIndices(); + } + @Override public boolean isPartOfPointInTime(ShardSearchContextId contextId) { final PointInTimeBuilder pointInTimeBuilder = request.pointInTimeBuilder(); @@ -674,7 +684,7 @@ private void raisePhaseFailure(SearchPhaseExecutionException exception) { try { SearchShardTarget searchShardTarget = entry.getSearchShardTarget(); Transport.Connection connection = getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - sendReleaseSearchContext(entry.getContextId(), connection, searchShardTarget.getOriginalIndices()); + sendReleaseSearchContext(entry.getContextId(), connection, getOriginalIndices(entry.getShardIndex())); } catch (Exception inner) { inner.addSuppressed(exception); logger.trace("failed to release context", inner); @@ -723,9 +733,9 @@ public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shar AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); - ShardSearchRequest shardRequest = new ShardSearchRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), shardIndex, - getNumShards(), filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), - shardIt.getSearchContextId(), shardIt.getSearchContextKeepAlive()); + ShardSearchRequest shardRequest = new ShardSearchRequest(shardIt.getOriginalIndices(), request, + shardIt.shardId(), shardIndex, getNumShards(), filter, indexBoost, timeProvider.getAbsoluteStartMillis(), + shardIt.getClusterAlias(), shardIt.getSearchContextId(), shardIt.getSearchContextKeepAlive()); // if we already received a search result we can inform the shard that it // can return a null response if the request rewrites to match none rather // than creating an empty response in the search thread pool. diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index 09ae052bd1d5e..97726626abe55 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -148,8 +148,7 @@ protected void performPhaseOnShard(int shardIndex, SearchShardIterator shardIt, } CanMatchResponse result = new CanMatchResponse(canMatch, null); - result.setSearchShardTarget(shard == null ? new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias(), - shardIt.getOriginalIndices()) : shard); + result.setSearchShardTarget(shard == null ? 
new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias()) : shard); result.setShardIndex(shardIndex); fork(() -> onShardResult(result, shardIt)); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 98204f19e9e96..76d60235162f9 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -64,13 +64,13 @@ public void run() throws IOException { searchResults.size(), () -> context.executeNextPhase(this, nextPhaseFactory.apply(queryResult)), context); for (final DfsSearchResult dfsResult : searchResults) { - final SearchShardTarget searchShardTarget = dfsResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - QuerySearchRequest querySearchRequest = new QuerySearchRequest(searchShardTarget.getOriginalIndices(), + final SearchShardTarget shardTarget = dfsResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); + QuerySearchRequest querySearchRequest = new QuerySearchRequest(context.getOriginalIndices(dfsResult.getShardIndex()), dfsResult.getContextId(), dfsResult.getShardSearchRequest(), dfs); final int shardIndex = dfsResult.getShardIndex(); searchTransportService.sendExecuteQuery(connection, querySearchRequest, context.getTask(), - new SearchActionListener(searchShardTarget, shardIndex) { + new SearchActionListener(shardTarget, shardIndex) { @Override protected void innerOnResponse(QuerySearchResult response) { @@ -86,15 +86,15 @@ public void onFailure(Exception exception) { try { context.getLogger().debug(() -> new ParameterizedMessage("[{}] Failed to execute query phase", querySearchRequest.contextId()), exception); - progressListener.notifyQueryFailure(shardIndex, searchShardTarget, exception); - counter.onFailure(shardIndex, searchShardTarget, exception); + progressListener.notifyQueryFailure(shardIndex, shardTarget, exception); + counter.onFailure(shardIndex, shardTarget, exception); } finally { if (context.isPartOfPointInTime(querySearchRequest.contextId()) == false) { // the query might not have been executed at all (for example because thread pool rejected // execution) and the search context that was created in dfs phase might not be released. 
// release it again to be in the safe side context.sendReleaseSearchContext( - querySearchRequest.contextId(), connection, searchShardTarget.getOriginalIndices()); + querySearchRequest.contextId(), connection, context.getOriginalIndices(shardIndex)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index f944feae10fa4..a54c055843aaf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -136,14 +136,13 @@ private void innerRun() throws Exception { // in any case we count down this result since we don't talk to this shard anymore counter.countDown(); } else { - SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), - searchShardTarget.getNodeId()); + SearchShardTarget shardTarget = queryResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getContextId(), i, entry, - lastEmittedDocPerShard, searchShardTarget.getOriginalIndices(), queryResult.getShardSearchRequest(), - queryResult.getRescoreDocIds()); - executeFetch(queryResult.getShardIndex(), searchShardTarget, counter, fetchSearchRequest, queryResult.queryResult(), - connection); + lastEmittedDocPerShard, context.getOriginalIndices(queryResult.getShardIndex()), + queryResult.getShardSearchRequest(), queryResult.getRescoreDocIds()); + executeFetch(queryResult.getShardIndex(), shardTarget, counter, fetchSearchRequest, + queryResult.queryResult(), connection); } } } @@ -201,9 +200,10 @@ private void releaseIrrelevantSearchContext(QuerySearchResult queryResult) { && context.getRequest().scroll() == null && (context.isPartOfPointInTime(queryResult.getContextId()) == false)) { try { - SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - context.sendReleaseSearchContext(queryResult.getContextId(), connection, searchShardTarget.getOriginalIndices()); + SearchShardTarget shardTarget = queryResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); + context.sendReleaseSearchContext(queryResult.getContextId(), connection, + context.getOriginalIndices(queryResult.getShardIndex())); } catch (Exception e) { context.getLogger().trace("failed to release context", e); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index c0f2cf32aa6b6..5dc8e572cd8c1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -47,6 +47,11 @@ interface SearchPhaseContext extends Executor { */ SearchRequest getRequest(); + /** + * Returns the targeted {@link OriginalIndices} for the provided {@code shardIndex}. + */ + OriginalIndices getOriginalIndices(int shardIndex); + /** * Checks if the given context id is part of the point in time of this search (if exists). 
* We should not release search contexts that belong to the point in time during or after searches. diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index a2b5a92d1d07f..49067379983f5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -141,7 +141,7 @@ protected void setSearchShardTarget(T response) { // we need this down the road for subseq. phases SearchShardTarget searchShardTarget = response.getSearchShardTarget(); response.setSearchShardTarget(new SearchShardTarget(searchShardTarget.getNodeId(), searchShardTarget.getShardId(), - target.getClusterAlias(), null)); + target.getClusterAlias())); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java index 63ae879a58904..a3fc006e6fc92 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java @@ -85,7 +85,7 @@ public String getClusterAlias() { SearchShardTarget nextOrNull() { final String nodeId = targetNodesIterator.nextOrNull(); if (nodeId != null) { - return new SearchShardTarget(nodeId, shardId, clusterAlias, originalIndices); + return new SearchShardTarget(nodeId, shardId, clusterAlias); } return null; } diff --git a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 6377abba32904..960be012ca205 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.core.Nullable; @@ -158,7 +157,7 @@ public static ShardSearchFailure fromXContent(XContentParser parser) throws IOEx SearchShardTarget searchShardTarget = null; if (nodeId != null) { searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias); } return new ShardSearchFailure(exception, searchShardTarget); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 29b74b4096d61..24f2523be7b5e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -79,10 +79,10 @@ protected void doExecute(Task task, OpenPointInTimeRequest request, ActionListen searchRequest, "open_search_context", true, - (searchTask, shardTarget, connection, phaseListener) -> { + (searchTask, shardIt, connection, phaseListener) -> { final ShardOpenReaderRequest shardRequest = new ShardOpenReaderRequest( - shardTarget.getShardId(), 
- shardTarget.getOriginalIndices(), + shardIt.shardId(), + shardIt.getOriginalIndices(), request.keepAlive() ); transportService.sendChildRequest( diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 847be29fa4ddc..5d77c68941aef 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -82,6 +83,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.BiFunction; +import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -140,10 +142,43 @@ public TransportSearchAction(ThreadPool threadPool, this.executorSelector = executorSelector; } - private Map buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState, - Index[] concreteIndices, Map remoteAliasMap) { + private Map buildPerIndexOriginalIndices(ClusterState clusterState, + Set indicesAndAliases, + Index[] concreteIndices, + IndicesOptions indicesOptions) { + Map res = new HashMap<>(); + for (Index index : concreteIndices) { + clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); + + String[] aliases = indexNameExpressionResolver.indexAliases(clusterState, index.getName(), aliasMetadata -> true, + dataStreamAlias -> true, true, indicesAndAliases); + BooleanSupplier hasDataStreamRef = () -> { + IndexAbstraction ret = clusterState.getMetadata().getIndicesLookup().get(index.getName()); + if (ret == null || ret.getParentDataStream() == null) { + return false; + } + return indicesAndAliases.contains(ret.getParentDataStream().getName()); + }; + List finalIndices = new ArrayList<>(); + if (aliases == null + || aliases.length == 0 + || indicesAndAliases.contains(index.getName()) + || hasDataStreamRef.getAsBoolean()) { + finalIndices.add(index.getName()); + } + if (aliases != null) { + finalIndices.addAll(Arrays.asList(aliases)); + } + res.put(index.getUUID(), new OriginalIndices(finalIndices.toArray(String[]::new), indicesOptions)); + } + return Collections.unmodifiableMap(res); + } + + private Map buildPerIndexAliasFilter(ClusterState clusterState, + Set indicesAndAliases, + Index[] concreteIndices, + Map remoteAliasMap) { final Map aliasFilterMap = new HashMap<>(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, index.getName(), indicesAndAliases); @@ -225,7 +260,7 @@ protected void doExecute(Task task, SearchRequest searchRequest, ActionListener< } public interface SinglePhaseSearchAction { - void executeOnShardTarget(SearchTask searchTask, SearchShardTarget target, Transport.Connection connection, + void 
executeOnShardTarget(SearchTask searchTask, SearchShardIterator shardIt, Transport.Connection connection, ActionListener listener); } @@ -248,7 +283,7 @@ public AbstractSearchAsyncAction asyncSearchAction( protected void executePhaseOnShard(SearchShardIterator shardIt, SearchShardTarget shard, SearchActionListener listener) { final Transport.Connection connection = getConnection(shard.getClusterAlias(), shard.getNodeId()); - phaseSearchAction.executeOnShardTarget(task, shard, connection, listener); + phaseSearchAction.executeOnShardTarget(task, shardIt, connection, listener); } @Override @@ -585,8 +620,11 @@ static List getRemoteShardsIteratorFromPointInTime(Map localShardRoutings = clusterService.operationRouting().searchShards(clusterState, concreteLocalIndices, routingMap, searchRequest.preference(), searchService.getResponseCollectorService(), nodeSearchCounts); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + aliasFilter = buildPerIndexAliasFilter(clusterState, indicesAndAliases, indices, remoteAliasMap); + final Map finalIndicesMap = + buildPerIndexOriginalIndices(clusterState, indicesAndAliases, indices, searchRequest.indicesOptions()); localShardIterators = StreamSupport.stream(localShardRoutings.spliterator(), false) - .map(it -> new SearchShardIterator( - searchRequest.getLocalClusterAlias(), it.shardId(), it.getShardRoutings(), localIndices)) + .map(it -> { + OriginalIndices finalIndices = finalIndicesMap.get(it.shardId().getIndex().getUUID()); + assert finalIndices != null; + return new SearchShardIterator(searchRequest.getLocalClusterAlias(), it.shardId(), it.getShardRoutings(), finalIndices); + }) .collect(Collectors.toList()); - aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); } final GroupShardsIterator shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators); @@ -950,7 +994,9 @@ static List getLocalLocalShardsIteratorFromPointInTime(Clus } } } - iterators.add(new SearchShardIterator(localClusterAlias, shardId, targetNodes, originalIndices, + OriginalIndices finalIndices = new OriginalIndices(new String[] { shardId.getIndexName() }, + originalIndices.indicesOptions()); + iterators.add(new SearchShardIterator(localClusterAlias, shardId, targetNodes, finalIndices, perNode.getSearchContextId(), keepAlive)); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java index 29540da2c0f5e..2545b3b4ce26a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java @@ -139,6 +139,10 @@ public CompressedXContent getFilter() { return filter; } + public boolean filteringRequired() { + return filter != null; + } + /** * Returns a new {@link DataStreamAlias} instance with the provided data stream name added to it as a new member. * If the provided isWriteDataStream is set to true then the provided data stream is also set as write data stream. 
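[Editor's illustrative sketch, not part of the patch] The buildPerIndexOriginalIndices hunk above is the heart of the change: for each concrete index a shard belongs to, the shard-level request now carries only that index name and/or the aliases (or data stream alias) through which the request actually resolved to it, instead of the request's full original index list. Below is a minimal, self-contained Java sketch of that narrowing decision. The helper name narrowOriginalIndices and its inputs (resolvedExpressions, matchingAliases, parentDataStream) are hypothetical stand-ins for the ClusterState and IndexNameExpressionResolver machinery used by the real method; they are not Elasticsearch API.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Set;

    // Illustrative only: mirrors the shape of the narrowing logic, not the real implementation.
    final class NarrowOriginalIndicesSketch {

        /**
         * Computes the index list a shard-level request should carry.
         *
         * @param concreteIndex       the concrete index the shard belongs to
         * @param resolvedExpressions expressions the user request resolved to
         *                            (concrete indices, aliases, data streams)
         * @param matchingAliases     aliases of {@code concreteIndex} that matched the request
         * @param parentDataStream    name of the parent data stream, or null if none
         */
        static String[] narrowOriginalIndices(String concreteIndex,
                                              Set<String> resolvedExpressions,
                                              List<String> matchingAliases,
                                              String parentDataStream) {
            boolean referencedViaDataStream = parentDataStream != null
                && resolvedExpressions.contains(parentDataStream);
            List<String> finalIndices = new ArrayList<>();
            // Keep the concrete index name only if the request addressed it directly,
            // reached it through its data stream, or no alias matched at all.
            if (matchingAliases.isEmpty()
                || resolvedExpressions.contains(concreteIndex)
                || referencedViaDataStream) {
                finalIndices.add(concreteIndex);
            }
            // Additionally carry only the aliases that were actually used to resolve
            // this index, never the request's full list of original indices.
            finalIndices.addAll(matchingAliases);
            return finalIndices.toArray(String[]::new);
        }

        public static void main(String[] args) {
            // Request on alias "logs-read" resolving to "logs-000001": the shard
            // request carries just the alias.
            System.out.println(String.join(", ",
                narrowOriginalIndices("logs-000001", Set.of("logs-read"), List.of("logs-read"), null)));
            // Request addressing the concrete index directly: the index name is kept.
            System.out.println(String.join(", ",
                narrowOriginalIndices("logs-000001", Set.of("logs-000001", "other-index"), List.of(), null)));
        }
    }

Keeping the concrete index name only when it (or its data stream) was addressed directly corresponds to the gate in the hunk above, where indicesAndAliases.contains(index.getName()) and hasDataStreamRef decide whether the index name joins finalIndices alongside the matching aliases.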
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 50429a4088b78..2b6a87a9c8a72 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -511,7 +511,8 @@ public Set resolveExpressions(ClusterState state, String... expressions) * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. */ public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { - return indexAliases(state, index, AliasMetadata::filteringRequired, false, resolvedExpressions); + return indexAliases(state, index, + AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } /** @@ -530,8 +531,12 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) { * the index itself - null is returned. Returns {@code null} if no filtering is required. *

NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}. */ - public String[] indexAliases(ClusterState state, String index, Predicate requiredAlias, boolean skipIdentity, - Set resolvedExpressions) { + public String[] indexAliases(ClusterState state, + String index, + Predicate requiredAlias, + Predicate requiredDataStreamAlias, + boolean skipIdentity, + Set resolvedExpressions) { if (isAllIndices(resolvedExpressions)) { return null; } @@ -558,7 +563,7 @@ public String[] indexAliases(ClusterState state, String index, Predicate dataStreamAlias.getDataStreams().contains(dataStream.getName())) - .filter(dataStreamAlias -> dataStreamAlias.getFilter() != null) + .filter(requiredDataStreamAlias) .map(DataStreamAlias::getName) .toArray(String[]::new); } else { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index ab81722755874..00db880d27980 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -1526,7 +1526,7 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set filterParser = bytes -> { try (InputStream inputStream = bytes.streamInput(); XContentParser parser = XContentFactory.xContentType(inputStream).xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, inputStream)) { + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, inputStream)) { return parseInnerQueryBuilder(parser); } }; diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 282fe196cc520..bd3ee22811807 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.ParsingException; @@ -800,7 +799,7 @@ public static SearchHit createFromMap(Map values) { String nodeId = get(Fields._NODE, values, null); if (shardId != null && nodeId != null) { assert shardId.getIndexName().equals(index); - searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE)); + searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias)); } else { //these fields get set anyways when setting the shard target, //but we set them explicitly when we don't have enough info to rebuild the shard target diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 175f028435ed6..95538c885b21d 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -18,7 +18,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.action.search.SearchType; @@ -808,7 +807,7 @@ private DefaultSearchContext 
createSearchContext(ReaderContext reader, ShardSear DefaultSearchContext searchContext = null; try { SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), - reader.indexShard().shardId(), request.getClusterAlias(), OriginalIndices.NONE); + reader.indexShard().shardId(), request.getClusterAlias()); searchContext = new DefaultSearchContext(reader, request, shardTarget, threadPool::relativeTimeInMillis, timeout, fetchPhase, lowLevelCancellation); // we clone the query shard context here just for rewriting otherwise we diff --git a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 98e1cc97f6d46..974829c985dd2 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -8,7 +8,6 @@ package org.elasticsearch.search; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.core.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -27,9 +26,6 @@ public final class SearchShardTarget implements Writeable, Comparable randomExceptions() { actual = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{ new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)) + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)) }); expected = new ElasticsearchException("Elasticsearch exception [type=search_phase_execution_exception, " + "reason=all shards failed]"); diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 9880f5ba14658..40a1647c85d6a 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -277,7 +276,7 @@ public void testQueryShardException() throws IOException { } public void testSearchException() throws IOException { - SearchShardTarget target = new SearchShardTarget("foo", new ShardId("bar", "_na_", 1), null, OriginalIndices.NONE); + SearchShardTarget target = new SearchShardTarget("foo", new ShardId("bar", "_na_", 1), null); SearchException ex = serialize(new SearchException(target, "hello world")); assertEquals(target, ex.shard()); assertEquals(ex.getMessage(), "hello world"); diff --git a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java index 0e47493150c95..212117e8d41c9 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java @@ -11,7 +11,6 @@ import com.fasterxml.jackson.core.JsonParseException; import org.apache.commons.codec.DecoderException; import org.apache.lucene.index.CorruptIndexException; -import 
org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -123,7 +122,7 @@ private static ShardSearchFailure createShardFailureParsingException(String erro private static SearchShardTarget createSearchShardTarget(String nodeId, int shardId, String index, String clusterAlias) { return new SearchShardTarget(nodeId, - new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias); } public void testGroupByNullTarget() { diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 51d1d9b2637c7..6c1e26c11dd05 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -72,7 +72,7 @@ private AbstractSearchAsyncAction createAction(SearchRequest resolvedNodes.add(Tuple.tuple(cluster, node)); return null; }; - + OriginalIndices originalIndices = new OriginalIndices(request.indices(), request.indicesOptions()); return new AbstractSearchAsyncAction("test", logger, null, nodeIdToConnection, Collections.singletonMap("foo", new AliasFilter(new MatchAllQueryBuilder())), Collections.singletonMap("foo", 2.0f), null, request, listener, @@ -104,6 +104,11 @@ public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.C OriginalIndices originalIndices) { releasedContexts.add(contextId); } + + @Override + public OriginalIndices getOriginalIndices(int shardIndex) { + return originalIndices; + } }; } @@ -159,7 +164,7 @@ public void testSendSearchResponseDisallowPartialFailures() { ShardId failureShardId = new ShardId("index", "index-uuid", i); String failureClusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10); String failureNodeId = randomAlphaOfLengthBetween(5, 10); - action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias, OriginalIndices.NONE), + action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias), new IllegalArgumentException()); } action.sendSearchResponse(InternalSearchResponse.empty(), phaseResults.results); @@ -235,7 +240,7 @@ private static ArraySearchPhaseResults phaseResults(Set {}); } diff --git a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java index 280af9ad7d2cc..39667c81f3cdc 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java @@ -85,13 +85,13 @@ public void testClearScrollIds() throws IOException, InterruptedException { AtomicArray array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 1), node1); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 12), node2); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 42), node3); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); @@ -149,13 +149,13 @@ public void testClearScrollIdsWithFailure() throws IOException, InterruptedExcep AtomicArray array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 1), node1); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 12), node2); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new 
SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 42), node3); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); diff --git a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java index a46fb722d1733..ed010c5c146de 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.action.search; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; @@ -54,13 +53,13 @@ public void testCollect() throws InterruptedException { new ShardSearchContextId(UUIDs.randomBase64UUID(), shardID), null, null); dfsSearchResult.setShardIndex(shardID); dfsSearchResult.setSearchShardTarget(new SearchShardTarget("foo", - new ShardId("bar", "baz", shardID), null, OriginalIndices.NONE)); + new ShardId("bar", "baz", shardID), null)); collector.onResult(dfsSearchResult);}); break; case 2: state.add(2); executor.execute(() -> collector.onFailure(shardID, new SearchShardTarget("foo", new ShardId("bar", "baz", shardID), - null, OriginalIndices.NONE), new RuntimeException("boom"))); + null), new RuntimeException("boom"))); break; default: fail("unknown state"); diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index fd5e8a424b273..cab54fdc78a1e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; @@ -47,9 +46,9 @@ public void testDfsWith2Shards() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null))); results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -59,7 +58,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest SearchActionListener listener) { if (request.contextId().getId() == 1) { QuerySearchResult 
queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -67,7 +66,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest listener.onResponse(queryResult); } else if (request.contextId().getId() == 2) { QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -111,9 +110,9 @@ public void testDfsWith1ShardFailed() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null))); results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -124,7 +123,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest if (request.contextId().getId() == 1) { QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), new SearchShardTarget("node1", new ShardId("test", "na", 0), - null, OriginalIndices.NONE), null); + null), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs( new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -173,9 +172,9 @@ public void testFailPhaseOnException() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null))); results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -185,7 +184,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest SearchActionListener listener) { if (request.contextId().getId() == 1) { QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("test", "na", 0), 
null), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 4b215fc2a1183..11013d9905291 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; @@ -57,7 +56,7 @@ public void testShortcutQueryAndFetchOptimization() { if (hasHits) { QuerySearchResult queryResult = new QuerySearchResult(); queryResult.setSearchShardTarget(new SearchShardTarget("node0", - new ShardId("index", "index", 0), null, OriginalIndices.NONE)); + new ShardId("index", "index", 0), null)); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]); addProfiling(profiled, queryResult); @@ -115,7 +114,7 @@ public void testFetchTwoDocument() { boolean profiled = randomBoolean(); ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -125,7 +124,7 @@ public void testFetchTwoDocument() { results.consumeResult(queryResult, () -> {}); final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(ctx2, shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -183,7 +182,7 @@ public void testFailFetchOneDoc() { boolean profiled = randomBoolean(); final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -192,7 +191,7 @@ public void testFailFetchOneDoc() { addProfiling(profiled, queryResult); 
results.consumeResult(queryResult, () -> {}); - SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -269,7 +268,7 @@ public void testFetchDocsConcurrently() throws InterruptedException { mockSearchPhaseContext.getRequest(), numHits, exc -> {}); SearchShardTarget[] shardTargets = new SearchShardTarget[numHits]; for (int i = 0; i < numHits; i++) { - shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null, OriginalIndices.NONE); + shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i), shardTargets[i], null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]); @@ -348,7 +347,7 @@ public void testExceptionFailsPhase() { int resultSetSize = randomIntBetween(2, 10); boolean profiled = randomBoolean(); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -357,7 +356,7 @@ public void testExceptionFailsPhase() { addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); - SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -413,7 +412,7 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s boolean profiled = randomBoolean(); final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -423,7 +422,7 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s results.consumeResult(queryResult, () -> {}); final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - SearchShardTarget shard2Target = new 
SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(ctx2, shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index d579e492f9465..c609f275638f1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -74,6 +74,11 @@ public SearchRequest getRequest() { return searchRequest; } + @Override + public OriginalIndices getOriginalIndices(int shardIndex) { + return new OriginalIndices(searchRequest.indices(), searchRequest.indicesOptions()); + } + @Override public void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray queryResults) { String scrollId = getRequest().scroll() != null ? TransportSearchHelper.buildScrollId(queryResults) : null; diff --git a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java index 2440a3cb437c1..e190dea1a9f1e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; @@ -95,7 +94,7 @@ public void testProgressListenerExceptionsAreCaught() throws Exception { for (int i = 0; i < 10; i++) { SearchShardTarget searchShardTarget = new SearchShardTarget("node", new ShardId("index", "uuid", i), - null, OriginalIndices.NONE); + null); QuerySearchResult querySearchResult = new QuerySearchResult(); TopDocs topDocs = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]); querySearchResult.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index f140ee1d29b6d..4bfd65967a4c2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -19,7 +19,6 @@ import org.apache.lucene.search.TotalHits.Relation; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -319,7 +318,7 @@ private static AtomicArray generateQueryResults( for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { String clusterAlias = randomBoolean() ? 
null : "remote"; SearchShardTarget searchShardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), - clusterAlias, OriginalIndices.NONE); + clusterAlias); QuerySearchResult querySearchResult = new QuerySearchResult(new ShardSearchContextId("", shardIndex), searchShardTarget, null); final TopDocs topDocs; float maxScore = 0; @@ -465,13 +464,13 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { QuerySearchResult empty = QuerySearchResult.nullInstance(); int shardId = 2 + numEmptyResponses; empty.setShardIndex(2+numEmptyResponses); - empty.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null, OriginalIndices.NONE)); + empty.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null)); consumer.consumeResult(empty, latch::countDown); numEmptyResponses --; } QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", 0), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", 0), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); InternalAggregations aggs = InternalAggregations.from(singletonList(new InternalMax("test", 1.0D, DocValueFormat.RAW, emptyMap()))); @@ -480,7 +479,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { consumer.consumeResult(result, latch::countDown); result = new QuerySearchResult(new ShardSearchContextId("", 1), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", 0), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); aggs = InternalAggregations.from(singletonList(new InternalMax("test", 3.0D, DocValueFormat.RAW, emptyMap()))); @@ -489,7 +488,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { consumer.consumeResult(result, latch::countDown); result = new QuerySearchResult(new ShardSearchContextId("", 1), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", 0), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); aggs = InternalAggregations.from(singletonList(new InternalMax("test", 2.0D, DocValueFormat.RAW, emptyMap()))); @@ -501,7 +500,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { result = QuerySearchResult.nullInstance(); int shardId = 2 + numEmptyResponses; result.setShardIndex(shardId); - result.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null, OriginalIndices.NONE)); + result.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null)); consumer.consumeResult(result, latch::countDown); numEmptyResponses--; @@ -555,7 +554,7 @@ public void testConsumerConcurrently() throws Exception { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", id), - new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new 
ShardId("a", "b", id), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); @@ -603,7 +602,7 @@ public void testConsumerOnlyAggs() throws Exception { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), new DocValueFormat[0]); InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new InternalMax("test", (double) number, @@ -645,7 +644,7 @@ public void testConsumerOnlyHits() throws Exception { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); result.setShardIndex(i); @@ -687,7 +686,7 @@ public void testReduceTopNWithFromOffset() throws Exception { CountDownLatch latch = new CountDownLatch(4); for (int i = 0; i < 4; i++) { QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); ScoreDoc[] docs = new ScoreDoc[3]; for (int j = 0; j < docs.length; j++) { docs[j] = new ScoreDoc(0, score--); @@ -732,7 +731,7 @@ public void testConsumerSortByField() throws Exception { FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, new Object[]{number})}; TopDocs topDocs = new TopFieldDocs(new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); result.setShardIndex(i); result.size(size); @@ -773,7 +772,7 @@ public void testConsumerFieldCollapsing() throws Exception { FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, values)}; TopDocs topDocs = new CollapseTopFieldDocs("field", new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields, values); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); result.setShardIndex(i); result.size(size); @@ -809,7 +808,7 @@ public void testConsumerSuggestions() throws Exception { CountDownLatch latch = new CountDownLatch(expectedNumResults); for (int i = 0; i < expectedNumResults; i++) { QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new 
SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); List>> suggestions = new ArrayList<>(); { @@ -938,7 +937,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", id), - new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", id), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[]{new ScoreDoc(0, number)}), number), new DocValueFormat[0]); @@ -1004,7 +1003,7 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t final int index = i; threads[index] = new Thread(() -> { QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), index), - new SearchShardTarget("node", new ShardId("a", "b", index), null, OriginalIndices.NONE), + new SearchShardTarget("node", new ShardId("a", "b", index), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN), @@ -1054,7 +1053,7 @@ public void testFailConsumeAggs() throws Exception { for (int i = 0; i < expectedNumResults; i++) { final int index = i; QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), index), - new SearchShardTarget("node", new ShardId("a", "b", index), null, OriginalIndices.NONE), + new SearchShardTarget("node", new ShardId("a", "b", index), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN), diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java index 4b4f482406d2a..454a2ee664605 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -38,11 +37,11 @@ public void testToXContent() throws IOException { SearchPhaseExecutionException exception = new SearchPhaseExecutionException("test", "all shards failed", new ShardSearchFailure[]{ new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 0), null, OriginalIndices.NONE)), + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 0), null)), new ShardSearchFailure(new IndexShardClosedException(new ShardId("foo", "_na_", 1)), - new SearchShardTarget("node_2", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)), + new SearchShardTarget("node_2", new ShardId("foo", "_na_", 1), null)), new ShardSearchFailure(new ParsingException(5, 7, "foobar", null), - new SearchShardTarget("node_3", new ShardId("foo", "_na_", 2), null, OriginalIndices.NONE)), + new 
SearchShardTarget("node_3", new ShardId("foo", "_na_", 2), null)), }); // Failures are grouped (by default) @@ -94,7 +93,7 @@ public void testToAndFromXContent() throws IOException { new NullPointerException() ); shardSearchFailures[i] = new ShardSearchFailure(cause, new SearchShardTarget("node_" + i, - new ShardId("test", "_na_", i), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", i), null)); } final String phase = randomFrom("query", "search", "other"); @@ -143,7 +142,7 @@ public void testPhaseFailureWithSearchShardFailure() { new InvalidIndexTemplateException("foo", "bar") ); shardSearchFailures[i] = new ShardSearchFailure(cause, new SearchShardTarget("node_" + i, - new ShardId("test", "_na_", i), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", i), null)); } final String phase = randomFrom("fetch", "search", "other"); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index d449fbb977d56..0c5361d99b939 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -100,7 +100,7 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest numWithTopDocs.incrementAndGet(); } QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); if (withCollapse) { queryResult.topDocs(new TopDocsAndMaxScore( @@ -313,7 +313,7 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest SearchTask task, SearchActionListener listener) { int shardId = request.shardId().id(); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); if (shardId == 0) { queryResult.topDocs(new TopDocsAndMaxScore(new TopFieldDocs( @@ -412,7 +412,7 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest SearchTask task, SearchActionListener listener) { int shardId = request.shardId().id(); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); if (shardId == 0) { queryResult.topDocs(new TopDocsAndMaxScore(new TopFieldDocs( @@ -471,7 +471,7 @@ public void run() { assertThat(phase.totalHits.value, equalTo(2L)); assertThat(phase.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); - SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null, OriginalIndices.NONE); + SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); SearchActionListener listener = new 
SearchActionListener(searchShardTarget, 0) { @Override public void onFailure(Exception e) { } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index b6d9e40878f77..7663525372847 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.common.text.Text; import org.elasticsearch.core.Tuple; @@ -121,7 +120,7 @@ public void testMergeShardFailures() throws InterruptedException { ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; for (int j = 0; j < numFailures; j++) { ShardId shardId = new ShardId(randomFrom(indices), j); - SearchShardTarget searchShardTarget = new SearchShardTarget(randomAlphaOfLength(6), shardId, clusterAlias, null); + SearchShardTarget searchShardTarget = new SearchShardTarget(randomAlphaOfLength(6), shardId, clusterAlias); ShardSearchFailure failure = new ShardSearchFailure(new IllegalArgumentException(), searchShardTarget); shardSearchFailures[j] = failure; priorityQueue.add(Tuple.tuple(searchShardTarget, failure)); @@ -252,7 +251,7 @@ public void testMergeCompletionSuggestions() throws InterruptedException { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomIntBetween(0, Integer.MAX_VALUE)); String clusterAlias = randomBoolean() ? "" : randomAlphaOfLengthBetween(5, 10); - hit.shard(new SearchShardTarget("node", shardId, clusterAlias, OriginalIndices.NONE)); + hit.shard(new SearchShardTarget("node", shardId, clusterAlias)); option.setHit(hit); options.addOption(option); completionSuggestion.addTerm(options); @@ -300,7 +299,7 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedExceptio Collections.emptyMap()); SearchHit searchHit = new SearchHit(docId); searchHit.shard(new SearchShardTarget("node", new ShardId("index", "uuid", randomIntBetween(0, Integer.MAX_VALUE)), - randomBoolean() ? RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY : randomAlphaOfLengthBetween(5, 10), OriginalIndices.NONE)); + randomBoolean() ? 
RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY : randomAlphaOfLengthBetween(5, 10))); option.setHit(searchHit); options.addOption(option); completionSuggestion.addTerm(options); @@ -664,7 +663,7 @@ private static SearchHit[] randomSearchHitArray(int numDocs, int numResponses, S for (int j = 0; j < numDocs; j++) { ShardId shardId = new ShardId(randomFrom(indices), randomIntBetween(0, 10)); SearchShardTarget shardTarget = new SearchShardTarget(randomAlphaOfLengthBetween(3, 8), shardId, - clusterAlias, OriginalIndices.NONE); + clusterAlias); SearchHit hit = new SearchHit(randomIntBetween(0, Integer.MAX_VALUE)); float score = Float.NaN; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java index dc76e9d23a0e4..1fe511521f8c9 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.UUIDs; @@ -66,7 +65,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); }).start(); } @@ -157,7 +156,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); }).start(); } @@ -230,7 +229,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); }).start(); } @@ -308,7 +307,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); } }).start(); diff --git 
a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java index fb3143c089e4b..f30eb0744f6ee 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java @@ -64,7 +64,6 @@ public void testNewSearchShardTarget() { assertEquals(clusterAlias, searchShardTarget.getClusterAlias()); assertSame(shardId, searchShardTarget.getShardId()); assertEquals(nodeId, searchShardTarget.getNodeId()); - assertSame(originalIndices, searchShardTarget.getOriginalIndices()); } public void testEqualsAndHashcode() { diff --git a/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java b/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java index 724b158a0e0d4..c84687a7affe5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.search; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; @@ -37,7 +36,7 @@ public static ShardSearchFailure createTestItem(String indexUuid) { String indexName = randomAlphaOfLengthBetween(5, 10); String clusterAlias = randomBoolean() ? randomAlphaOfLengthBetween(5, 10) : null; searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, indexUuid), randomInt()), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(indexName, indexUuid), randomInt()), clusterAlias); } return new ShardSearchFailure(ex, searchShardTarget); } @@ -91,7 +90,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws public void testToXContent() throws IOException { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(0, 0, "some message", null), - new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), null, OriginalIndices.NONE)); + new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), null)); BytesReference xContent = toXContent(failure, XContentType.JSON, randomBoolean()); assertEquals( "{\"shard\":123," @@ -109,7 +108,7 @@ public void testToXContent() throws IOException { public void testToXContentWithClusterAlias() throws IOException { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(0, 0, "some message", null), - new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), "cluster1", OriginalIndices.NONE)); + new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), "cluster1")); BytesReference xContent = toXContent(failure, XContentType.JSON, randomBoolean()); assertEquals( "{\"shard\":123," diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index d62a64d54914b..c76ef34298f9b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -974,6 +974,7 @@ public void testLocalShardIteratorFromPointInTime() { final 
IndexMetadata indexMetadata = clusterState.metadata().index("test-1"); Map contexts = new HashMap<>(); Set relocatedContexts = new HashSet<>(); + Map aliasFilterMap = new HashMap<>(); for (int shardId = 0; shardId < numberOfShards; shardId++) { final String targetNode; if (randomBoolean()) { @@ -987,13 +988,15 @@ public void testLocalShardIteratorFromPointInTime() { contexts.put(new ShardId(indexMetadata.getIndex(), shardId), new SearchContextIdForNode(null, targetNode, new ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong(), null))); + aliasFilterMap.putIfAbsent(indexMetadata.getIndexUUID(), AliasFilter.EMPTY); } TimeValue keepAlive = randomBoolean() ? null : TimeValue.timeValueSeconds(between(30, 3600)); + final List shardIterators = TransportSearchAction.getLocalLocalShardsIteratorFromPointInTime( clusterState, OriginalIndices.NONE, null, - new SearchContextId(contexts, Map.of()), + new SearchContextId(contexts, aliasFilterMap), keepAlive ); shardIterators.sort(Comparator.comparing(SearchShardIterator::shardId)); diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java index 432b9af280804..4fb6866f04b0c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java @@ -27,13 +27,13 @@ public static AtomicArray generateQueryResults() { DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("a", 1), node1); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), "cluster_x", null)); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), "cluster_x")); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("b", 12), node2); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), "cluster_y", null)); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), "cluster_y")); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("c", 42), node3); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 7516bb4dff268..6700b12fcd6fb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -1423,11 +1423,11 @@ public void testIndexAliases() { ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metadata(mdBuilder).build(); Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); - String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, true, resolvedExpressions); + String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias-0", "test-alias-1", "test-alias-non-filtering"}, strings); - strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> x.alias().equals("test-alias-1"), true, + strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> x.alias().equals("test-alias-1"), x -> false, true, resolvedExpressions); assertArrayEquals(null, strings); } @@ -1452,21 +1452,21 @@ public void testIndexAliasesDataStreamAliases() { // Only resolve aliases with with that refer to dataStreamName1 Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); - String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, true, resolvedExpressions); - assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs")); + String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); + assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar")); } { // Only resolve aliases with with that refer to dataStreamName2 Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); - String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, true, resolvedExpressions); - assertThat(result, arrayContainingInAnyOrder("logs_baz")); + String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); + assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2")); } { // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); - String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, false, resolvedExpressions); + String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, false, resolvedExpressions); assertThat(result, nullValue()); } } @@ -1480,15 +1480,15 @@ public void testIndexAliasesSkipIdentity() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); - String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, false, resolvedExpressions); + String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); - aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, true, resolvedExpressions); + aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] {"test-alias"}, aliases); resolvedExpressions = Collections.singleton("other-alias"); - aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, false, 
resolvedExpressions); + aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertArrayEquals(new String[] {"other-alias"}, aliases); - aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, true, resolvedExpressions); + aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] {"other-alias"}, aliases); } @@ -1499,7 +1499,7 @@ public void testConcreteWriteIndexSuccessful() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); IndicesRequest request = new IndicesRequest() { @@ -1570,7 +1570,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); IndicesRequest request = new IndicesRequest() { @@ -1603,7 +1603,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(false))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), @@ -1623,7 +1623,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(false, null)))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), @@ -1644,7 +1644,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(test0WriteIndex == false, null)))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = 
indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, @@ -2268,7 +2268,7 @@ public void testDataStreamsWithWildcardExpression() { assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 1, epochMillis))); assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis))); assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis))); - assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis)));; + assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis))); } { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; diff --git a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 9fdeab771877e..9c80efe5eaac4 100644 --- a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; @@ -139,9 +138,9 @@ public void testConvert() throws IOException { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)); + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 2), null, OriginalIndices.NONE)); + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 2), null)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java index dfc9f6053b49b..cf8fdad583c98 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.rest.action; import com.fasterxml.jackson.core.io.JsonEOFException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -188,7 
+187,7 @@ private static ShardSearchFailure createShardFailureParsingException(String node private static SearchShardTarget createSearchShardTarget(String nodeId, int shardId, String index, String clusterAlias) { return new SearchShardTarget(nodeId, - new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index b5e0bc90cf8bc..332bffb8dec49 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -17,7 +17,6 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.store.Directory; import org.elasticsearch.Version; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.UUIDs; @@ -128,7 +127,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) { } }; - SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE); + SearchShardTarget target = new SearchShardTarget("node", shardId, null); ReaderContext readerWithoutScroll = new ReaderContext( newContextId(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false); @@ -266,7 +265,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) { } } }; - SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE); + SearchShardTarget target = new SearchShardTarget("node", shardId, null); ReaderContext readerContext = new ReaderContext( newContextId(), indexService, indexShard, searcherSupplier, randomNonNegativeLong(), false); DefaultSearchContext context = new DefaultSearchContext(readerContext, shardSearchRequest, target, null, diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 61623261b7faa..0b7cfb9db1436 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TotalHits; import org.elasticsearch.Version; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -129,7 +128,7 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp String index = randomAlphaOfLengthBetween(5, 10); String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10), - new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias, OriginalIndices.NONE)); + new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias)); } return hit; } @@ -215,7 +214,7 @@ public void testToXContent() throws IOException { public void testSerializeShardTarget() throws Exception { String clusterAlias = randomBoolean() ? 
         SearchShardTarget target = new SearchShardTarget("_node_id", new ShardId(new Index("_index", "_na_"), 0),
-            clusterAlias, OriginalIndices.NONE);
+            clusterAlias);
         Map<String, SearchHits> innerHits = new HashMap<>();
         SearchHit innerHit1 = new SearchHit(0, "_id", null, null);
diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
index 3b482451f37f7..85a476b42f9d5 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
@@ -11,7 +11,6 @@
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -219,7 +218,7 @@ public void testFromXContentWithShards() throws IOException {
                 String index = randomAlphaOfLengthBetween(5, 10);
                 String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
                 final SearchShardTarget shardTarget = new SearchShardTarget(randomAlphaOfLengthBetween(5, 10),
-                    new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias, OriginalIndices.NONE);
+                    new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias);
                 if (withExplanation) {
                     hit.explanation(SearchHitTests.createExplanation(randomIntBetween(0, 5)));
                 }
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
index 6bce43afaa573..f279d867a3e86 100644
--- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
@@ -89,6 +89,6 @@ private static FetchSearchResult fetchResult(SearchShardTarget target, ProfileRe
     }
 
     private static SearchShardTarget randomTarget() {
-        return new SearchShardTarget(randomAlphaOfLength(5), new ShardId(randomAlphaOfLength(5), "uuid", randomInt(6)), null, null);
+        return new SearchShardTarget(randomAlphaOfLength(5), new ShardId(randomAlphaOfLength(5), "uuid", randomInt(6)), null);
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java
index 2e77f6bff064c..3e7e8f71dfea7 100644
--- a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java
+++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java
@@ -12,7 +12,6 @@
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.OriginalIndicesTests;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.common.Strings;
@@ -51,7 +50,7 @@ private static QuerySearchResult createTestInstance() throws Exception {
         ShardSearchRequest shardSearchRequest = new ShardSearchRequest(OriginalIndicesTests.randomOriginalIndices(), searchRequest, shardId,
             0, 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, randomNonNegativeLong(), null);
         QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.base64UUID(), randomLong()),
-            new SearchShardTarget("node", shardId, null, OriginalIndices.NONE), shardSearchRequest);
+            new SearchShardTarget("node", shardId, null), shardSearchRequest);
         if (randomBoolean()) {
             result.terminatedEarly(randomBoolean());
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
index 06202619592fb..236e2032b177e 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
@@ -10,7 +10,6 @@
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.core.TimeValue;
@@ -55,7 +54,7 @@ public class TestSearchContext extends SearchContext {
     public static final SearchShardTarget SHARD_TARGET =
-        new SearchShardTarget("test", new ShardId("test", "test", 0), null, OriginalIndices.NONE);
+        new SearchShardTarget("test", new ShardId("test", "test", 0), null);
 
     final IndexService indexService;
     final BitsetFilterCache fixedBitSetFilterCache;
diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java
index f4269492cd4d9..4affb8204459e 100644
--- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java
+++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java
@@ -9,7 +9,6 @@
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
@@ -246,7 +245,7 @@ public void testWithFetchFailures() throws InterruptedException {
             IOException failure = new IOException("boum");
             //fetch failures are currently ignored, they come back with onFailure or onResponse anyways
             task.getSearchProgressActionListener().onFetchFailure(i,
-                new SearchShardTarget("0", new ShardId("0", "0", 1), null, OriginalIndices.NONE),
+                new SearchShardTarget("0", new ShardId("0", "0", 1), null),
                 failure);
             shardSearchFailures[i] = new ShardSearchFailure(failure);
         }
@@ -280,7 +279,7 @@ public void testFatalFailureDuringFetch() throws InterruptedException {
         for (int i = 0; i < numShards; i++) {
             //fetch failures are currently ignored, they come back with onFailure or onResponse anyways
             task.getSearchProgressActionListener().onFetchFailure(i,
-                new SearchShardTarget("0", new ShardId("0", "0", 1), null, OriginalIndices.NONE),
+                new SearchShardTarget("0", new ShardId("0", "0", 1), null),
                 new IOException("boum"));
         }
         assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false);
diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle
index 99d4833e32a13..b012f88a7f7c9 100644
--- a/x-pack/plugin/security/build.gradle
+++ b/x-pack/plugin/security/build.gradle
@@ -25,6 +25,7 @@ dependencies {
   testImplementation project(path: xpackModule('sql:sql-action'))
   testImplementation project(path: ':modules:analysis-common')
   testImplementation project(path: ':modules:reindex')
+  testImplementation project(path: xpackModule('data-streams'))
   testImplementation project(":client:rest-high-level")
   testImplementation(testArtifact(project(xpackModule('core'))))
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java
index 41e70fad926b7..91210395c38af 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java
@@ -7,16 +7,35 @@
 package org.elasticsearch.integration;
 
 import org.elasticsearch.action.admin.indices.alias.Alias;
+import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.elasticsearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction;
+import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction;
 import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
+import org.elasticsearch.cluster.metadata.Template;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.SecuritySettingsSourceField;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.test.SecurityIntegTestCase;
+import org.elasticsearch.xpack.core.action.CreateDataStreamAction;
+import org.elasticsearch.xpack.core.action.DeleteDataStreamAction;
+import org.elasticsearch.xpack.datastreams.DataStreamsPlugin;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD;
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER;
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -67,7 +86,14 @@ public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
             .build();
     }
 
-    public void testResolveWildcardsRegexs() throws Exception {
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        List<Class<? extends Plugin>> lst = new ArrayList<>(super.nodePlugins());
+        lst.add(DataStreamsPlugin.class);
+        return lst;
+    }
+
+    public void testGetResolveWildcardsRegexs() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("test")
                 .setMapping("field1", "type=text", "field2", "type=text")
                 .addAlias(new Alias("my_alias"))
@@ -99,4 +125,171 @@ public void testResolveWildcardsRegexs() throws Exception {
         assertThat((String) getResponse.getSource().get("field3"), equalTo("value3"));
     }
 
+    public void testSearchResolveWildcardsRegexs() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("test")
+            .setMapping("field1", "type=text", "field2", "type=text")
+            .addAlias(new Alias("my_alias"))
+            .addAlias(new Alias("an_alias"))
+        );
+        client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2", "field3", "value3")
+            .setRefreshPolicy(IMMEDIATE)
+            .get();
+
+        SearchResponse response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        Map<String, Object> source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(1));
+        assertThat((String) source.get("field1"), equalTo("value1"));
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("my_alias")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(1));
+        assertThat((String) source.get("field2"), equalTo("value2"));
+
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("an_alias")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(1));
+        assertThat((String) source.get("field3"), equalTo("value3"));
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("*_alias")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(2));
+        assertThat((String) source.get("field2"), equalTo("value2"));
+        assertThat((String) source.get("field3"), equalTo("value3"));
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("*_alias", "t*")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(3));
+        assertThat((String) source.get("field1"), equalTo("value1"));
+        assertThat((String) source.get("field2"), equalTo("value2"));
+        assertThat((String) source.get("field3"), equalTo("value3"));
+    }
+
+    public void testSearchResolveDataStreams() throws Exception {
+        putComposableIndexTemplate("id1", List.of("test*"));
+        CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("test");
+        client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).get();
+
+        IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest();
+        aliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD)
+            .aliases("my_alias", "an_alias")
+            .index("test"));
+        assertAcked(client().admin().indices().aliases(aliasesRequest).actionGet());
+
+        try {
+            String value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
+            client().prepareIndex("test")
+                .setCreate(true)
+                .setId("1")
+                .setSource(DEFAULT_TIMESTAMP_FIELD, value, "field1", "value1", "field2", "value2", "field3", "value3")
+                .setRefreshPolicy(IMMEDIATE)
+                .get();
+
+            SearchResponse response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("test")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            Map<String, Object> source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(1));
+            assertThat((String) source.get("field1"), equalTo("value1"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("my_alias")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(1));
+            assertThat((String) source.get("field2"), equalTo("value2"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("an_alias")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(1));
+            assertThat((String) source.get("field3"), equalTo("value3"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("*_alias")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(2));
+            assertThat((String) source.get("field2"), equalTo("value2"));
+            assertThat((String) source.get("field3"), equalTo("value3"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("*_alias", "t*")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(3));
+            assertThat((String) source.get("field1"), equalTo("value1"));
+            assertThat((String) source.get("field2"), equalTo("value2"));
+            assertThat((String) source.get("field3"), equalTo("value3"));
+        } finally {
+            AcknowledgedResponse response = client().execute(
+                DeleteDataStreamAction.INSTANCE,
+                new DeleteDataStreamAction.Request(new String[]{"*"})
+            ).actionGet();
+            assertAcked(response);
+
+            DeleteDataStreamAction.Request deleteDSRequest = new DeleteDataStreamAction.Request(new String[]{"*"});
+            client().execute(DeleteDataStreamAction.INSTANCE, deleteDSRequest).actionGet();
+            DeleteComposableIndexTemplateAction.Request deleteTemplateRequest = new DeleteComposableIndexTemplateAction.Request("*");
+            client().execute(DeleteComposableIndexTemplateAction.INSTANCE, deleteTemplateRequest).actionGet();
+        }
+    }
+
+    private void putComposableIndexTemplate(String id, List<String> patterns) throws IOException {
+        PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id);
+        request.indexTemplate(
+            new ComposableIndexTemplate(
+                patterns,
+                new Template(null, null, null),
+                null,
+                null,
+                null,
+                null,
+                new ComposableIndexTemplate.DataStreamTemplate(),
+                null
+            )
+        );
+        client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet();
+    }
 }
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java
index 2b7b9c5b84214..852065b87be90 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java
@@ -35,7 +35,7 @@ public void testFromSearchHit() {
         String index = randomAlphaOfLength(10);
         searchHit.setSeqNo(seqNo);
         searchHit.setPrimaryTerm(primaryTerm);
-        searchHit.shard(new SearchShardTarget("anynode", new ShardId(index, randomAlphaOfLength(10), 1), null, null));
+        searchHit.shard(new SearchShardTarget("anynode", new ShardId(index, randomAlphaOfLength(10), 1), null));
         assertThat(SeqNoPrimaryTermAndIndex.fromSearchHit(searchHit),
             equalTo(new SeqNoPrimaryTermAndIndex(seqNo, primaryTerm, index)));
     }
diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java
index 232c650dc496b..e7f32e10b9831 100644
--- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java
+++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.watcher.condition;
 
 import org.apache.lucene.search.TotalHits;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.xcontent.ToXContent;
@@ -78,7 +77,7 @@ public void testExecuteAccessHits() throws Exception {
             Clock.systemUTC());
         SearchHit hit = new SearchHit(0, "1", null, null);
         hit.score(1f);
-        hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null, OriginalIndices.NONE));
+        hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null));
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(
             new SearchHits(new SearchHit[]{hit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f),
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
index cdbe141a683bc..3342ee1ac1c90 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
@@ -9,7 +9,6 @@
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
@@ -181,7 +180,7 @@ void stopExecutor() {
             String id = String.valueOf(i);
             SearchHit hit = new SearchHit(1, id, Collections.emptyMap(), Collections.emptyMap());
             hit.version(1L);
-            hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever", OriginalIndices.NONE));
+            hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever"));
             hits[i] = hit;
 
             boolean active = randomBoolean();
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java
index f88b167835297..145219853e6b4 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.bulk.BulkAction;
@@ -218,7 +217,7 @@ public void testFindTriggeredWatchesGoodCase() {
         BytesArray source = new BytesArray("{}");
         SearchHit hit = new SearchHit(0, "first_foo", null, null);
         hit.version(1L);
-        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
+        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null));
         hit.sourceRef(source);
         SearchHits hits = new SearchHits(new SearchHit[]{hit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f);
         when(searchResponse1.getHits()).thenReturn(hits);
@@ -233,7 +232,7 @@ public void testFindTriggeredWatchesGoodCase() {
         // First return a scroll response with a single hit and then with no hits
         hit = new SearchHit(0, "second_foo", null, null);
         hit.version(1L);
-        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
+        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null));
         hit.sourceRef(source);
         hits = new SearchHits(new SearchHit[]{hit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f);
         SearchResponse searchResponse2 = new SearchResponse(