diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java index 64ad19a9a96e1..7b53bb55f51bd 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.QueryPhaseResultConsumer; import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchProgressListener; @@ -183,9 +182,7 @@ public SearchPhaseController.ReducedQueryPhase reduceAggs(TermsList candidateLis new DocValueFormat[] { DocValueFormat.RAW } ); result.aggregations(candidateList.get(i)); - result.setSearchShardTarget( - new SearchShardTarget("node", new ShardId(new Index("index", "index"), i), null, OriginalIndices.NONE) - ); + result.setSearchShardTarget(new SearchShardTarget("node", new ShardId(new Index("index", "index"), i), null)); shards.add(result); } SearchRequest request = new SearchRequest(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java index 7f2500656d1cd..46eede441529a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java @@ -80,7 +80,7 @@ private static ShardSearchFailure createShardFailureTestItem() { String nodeId = randomAlphaOfLengthBetween(5, 10); String indexName = randomAlphaOfLengthBetween(5, 10); searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null, null); + new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null); } return new ShardSearchFailure(ex, searchShardTarget); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index fcd83ecf639c2..471217728458b 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -65,7 +64,7 @@ public void testDCGAt() { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); 
assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA); @@ -116,7 +115,7 @@ public void testDCGAtSixMissingRatings() { } hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, rated); @@ -174,7 +173,7 @@ public void testDCGAtFourMoreRatings() { for (int i = 0; i < 4; i++) { hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java index d0d27822c9332..adaa984b4a817 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -46,7 +45,7 @@ public static EvalQueryQuality randomEvalQueryQuality() { for (int i = 0; i < numberOfSearchHits; i++) { RatedSearchHit ratedSearchHit = RatedSearchHitTests.randomRatedSearchHit(); // we need to associate each hit with an index name otherwise rendering will not work - ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null, OriginalIndices.NONE)); + ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null)); ratedHits.add(ratedSearchHit); } EvalQueryQuality evalQueryQuality = new EvalQueryQuality(randomAlphaOfLength(10), diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java index 74036530b60f6..9bec9d5cd5504 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -108,7 +107,7 @@ private SearchHit[] createSearchHits(List rated, Integer[] releva } hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new 
SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } return hits; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index 534da1309ecf1..08a3cb71dc0d6 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -194,7 +193,7 @@ private static SearchHit[] createSearchHits(int from, int to, String index) { SearchHit[] hits = new SearchHit[to + 1 - from]; for (int i = from; i <= to; i++) { hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); } return hits; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index dbac98bb32cb8..e0968fbd937af 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -104,7 +103,7 @@ public void testIgnoreUnlabeled() { // add an unlabeled search hit SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3); searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); assertEquals((double) 2 / 3, evaluated.metricScore(), 0.00001); @@ -124,7 +123,7 @@ public void testNoRatedDocs() throws Exception { for (int i = 0; i < 5; i++) { hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); assertEquals(0.0d, evaluated.metricScore(), 0.00001); @@ -246,7 +245,7 @@ private static SearchHit[] toSearchHits(List rated, String index) SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { hits[i] = new SearchHit(i, i + 
"", new Text(""), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); } return hits; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 4fa31b22f519d..5904cfa543260 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -60,7 +59,7 @@ public class RankEvalResponseTests extends ESTestCase { new IllegalArgumentException("Closed resource", new RuntimeException("Resource")), new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] { new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)) }), + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)) }), new ElasticsearchException("Parsing failed", new ParsingException(9, 42, "Wrong state", new NullPointerException("Unexpected null value"))) }; @@ -172,7 +171,7 @@ public void testToXContent() throws IOException { private static RatedSearchHit searchHit(String index, int docId, Integer rating) { SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap()); - hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); hit.score(1.0f); return new RatedSearchHit(hit, rating != null ? 
OptionalInt.of(rating) : OptionalInt.empty()); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java index 2dc7573018b8b..ced7f28253f2d 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.text.Text; @@ -105,7 +104,7 @@ public void testNoRatedDocs() throws Exception { SearchHit[] hits = new SearchHit[k]; for (int i = 0; i < k; i++) { hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null)); } EvalQueryQuality evaluated = (new RecallAtK()).evaluate("id", hits, Collections.emptyList()); @@ -227,7 +226,7 @@ private static SearchHit[] toSearchHits(List rated, String index) SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap()); - hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); + hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null)); } return hits; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 444f45144fbb3..5ebc70b225229 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -556,9 +556,10 @@ public void testSearchQueryThenFetch() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); clearInterceptedActions(); - assertSameIndices(searchRequest, SearchTransportService.QUERY_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME); + assertIndicesSubset(Arrays.asList(searchRequest.indices()), SearchTransportService.QUERY_ACTION_NAME, + SearchTransportService.FETCH_ID_ACTION_NAME); //free context messages are not necessarily sent, but if they are, check their indices - assertSameIndicesOptionalRequests(searchRequest, SearchTransportService.FREE_CONTEXT_ACTION_NAME); + assertIndicesSubsetOptionalRequests(Arrays.asList(searchRequest.indices()), SearchTransportService.FREE_CONTEXT_ACTION_NAME); } public void testSearchDfsQueryThenFetch() throws Exception { @@ -577,10 +578,10 @@ public void testSearchDfsQueryThenFetch() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); clearInterceptedActions(); - assertSameIndices(searchRequest, SearchTransportService.DFS_ACTION_NAME, SearchTransportService.QUERY_ID_ACTION_NAME, - SearchTransportService.FETCH_ID_ACTION_NAME); + assertIndicesSubset(Arrays.asList(searchRequest.indices()), SearchTransportService.DFS_ACTION_NAME, + SearchTransportService.QUERY_ID_ACTION_NAME, SearchTransportService.FETCH_ID_ACTION_NAME); //free context 
messages are not necessarily sent, but if they are, check their indices - assertSameIndicesOptionalRequests(searchRequest, SearchTransportService.FREE_CONTEXT_ACTION_NAME); + assertIndicesSubsetOptionalRequests(Arrays.asList(searchRequest.indices()), SearchTransportService.FREE_CONTEXT_ACTION_NAME); } private static void assertSameIndices(IndicesRequest originalRequest, String... actions) { @@ -604,11 +605,22 @@ private static void assertSameIndices(IndicesRequest originalRequest, boolean op } } } + private static void assertIndicesSubset(List indices, String... actions) { + assertIndicesSubset(indices, false, actions); + } + + private static void assertIndicesSubsetOptionalRequests(List indices, String... actions) { + assertIndicesSubset(indices, true, actions); + } + + private static void assertIndicesSubset(List indices, boolean optional, String... actions) { //indices returned by each bulk shard request need to be a subset of the original indices for (String action : actions) { List requests = consumeTransportRequests(action); - assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0)); + if (optional == false) { + assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0)); + } for (TransportRequest internalRequest : requests) { IndicesRequest indicesRequest = convertRequest(internalRequest); for (String index : indicesRequest.indices()) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index d69a3df59be7a..8de9f0acd4c46 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -62,8 +62,8 @@ protected void masterOperation(final ClusterSearchShardsRequest request, final C Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); - final String[] aliases = indexNameExpressionResolver.indexAliases(clusterState, index, aliasMetadata -> true, true, - indicesAndAliases); + final String[] aliases = indexNameExpressionResolver.indexAliases(clusterState, index, + aliasMetadata -> true, dataStreamAlias -> true, true, indicesAndAliases); indicesAndFilters.put(index, new AliasFilter(aliasFilter.getQueryBuilder(), aliases)); } diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 38129742851bb..0467fe790e765 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.action.support.TransportActions; @@ -41,6 +42,7 @@ import 
java.util.ArrayDeque; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -66,6 +68,7 @@ abstract class AbstractSearchAsyncAction exten private final Executor executor; private final ActionListener listener; private final SearchRequest request; + /** * Used by subclasses to resolve node ids to DiscoveryNodes. **/ @@ -85,7 +88,8 @@ abstract class AbstractSearchAsyncAction exten protected final GroupShardsIterator toSkipShardsIts; protected final GroupShardsIterator shardsIts; - private final Map shardItIndexMap; + private final SearchShardIterator[] shardIterators; + private final Map shardIndexMap; private final int expectedTotalOps; private final AtomicInteger totalOps = new AtomicInteger(); private final int maxConcurrentRequestsPerNode; @@ -115,16 +119,18 @@ abstract class AbstractSearchAsyncAction exten } this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators); this.shardsIts = new GroupShardsIterator<>(iterators); - this.shardItIndexMap = new HashMap<>(); // we compute the shard index based on the natural order of the shards // that participate in the search request. This means that this number is // consistent between two requests that target the same shards. - List naturalOrder = new ArrayList<>(iterators); - CollectionUtil.timSort(naturalOrder); - for (int i = 0; i < naturalOrder.size(); i++) { - shardItIndexMap.put(naturalOrder.get(i), i); + Map shardMap = new HashMap<>(); + List searchIterators = new ArrayList<>(iterators); + CollectionUtil.timSort(searchIterators); + for (int i = 0; i < searchIterators.size(); i++) { + shardMap.put(searchIterators.get(i), i); } + this.shardIndexMap = Collections.unmodifiableMap(shardMap); + this.shardIterators = searchIterators.toArray(new SearchShardIterator[0]); // we need to add 1 for non active partition, since we count it in the total. This means for each shard in the iterator we sum up // it's number of active shards but use 1 as the default if no replica of a shard is active at this point. @@ -221,8 +227,8 @@ public final void run() { for (int i = 0; i < shardsIts.size(); i++) { final SearchShardIterator shardRoutings = shardsIts.get(i); assert shardRoutings.skip() == false; - assert shardItIndexMap.containsKey(shardRoutings); - int shardIndex = shardItIndexMap.get(shardRoutings); + assert shardIndexMap.containsKey(shardRoutings); + int shardIndex = shardIndexMap.get(shardRoutings); performPhaseOnShard(shardIndex, shardRoutings, shardRoutings.nextOrNull()); } } @@ -287,8 +293,7 @@ protected void performPhaseOnShard(final int shardIndex, final SearchShardIterat */ if (shard == null) { assert assertExecuteOnStartThread(); - SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), - shardIt.getClusterAlias(), shardIt.getOriginalIndices()); + SearchShardTarget unassignedShard = new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias()); onShardFailure(shardIndex, unassignedShard, shardIt, new NoShardAvailableActionException(shardIt.shardId())); } else { final PendingExecutions pendingExecutions = throttleConcurrentRequests ? 
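The hunks above replace the old per-request shardItIndexMap with a sorted, immutable index map plus a parallel SearchShardIterator[] array, and the hunk below builds on that to expose OriginalIndices by shard index. A minimal standalone sketch of the same stable-indexing idea, using hypothetical simplified types (ShardRef and StableShardIndexing are illustrations, not Elasticsearch classes):

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical stand-in for SearchShardIterator: only the natural ordering matters here.
record ShardRef(String index, int shardId) implements Comparable<ShardRef> {
    @Override
    public int compareTo(ShardRef other) {
        int cmp = index.compareTo(other.index);
        return cmp != 0 ? cmp : Integer.compare(shardId, other.shardId);
    }
}

public class StableShardIndexing {
    private final Map<ShardRef, Integer> shardIndexMap;
    private final ShardRef[] shardIterators;

    StableShardIndexing(List<ShardRef> iterators) {
        // Sort into natural order so the assigned index is consistent across
        // requests that target the same shards, then freeze the mapping.
        List<ShardRef> sorted = new ArrayList<>(iterators);
        Collections.sort(sorted);
        Map<ShardRef, Integer> map = new HashMap<>();
        for (int i = 0; i < sorted.size(); i++) {
            map.put(sorted.get(i), i);
        }
        this.shardIndexMap = Collections.unmodifiableMap(map);
        this.shardIterators = sorted.toArray(new ShardRef[0]);
    }

    // Look up the stable index assigned to a shard.
    int shardIndex(ShardRef shard) {
        return shardIndexMap.get(shard);
    }

    // Mirrors getOriginalIndices(int shardIndex): resolve per-shard state by its stable index.
    ShardRef byIndex(int shardIndex) {
        return shardIterators[shardIndex];
    }

    public static void main(String[] args) {
        StableShardIndexing indexing = new StableShardIndexing(
            List.of(new ShardRef("logs", 1), new ShardRef("logs", 0), new ShardRef("metrics", 0)));
        System.out.println(indexing.shardIndex(new ShardRef("logs", 1)));   // 1
        System.out.println(indexing.byIndex(2));                            // ShardRef[index=metrics, shardId=0]
    }
}
```

Keeping the sorted array alongside the map is what lets the later hunks answer getOriginalIndices(shardIndex) with a plain array access instead of storing OriginalIndices on every SearchShardTarget.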
@@ -608,6 +613,11 @@ public final SearchRequest getRequest() { return request; } + @Override + public OriginalIndices getOriginalIndices(int shardIndex) { + return shardIterators[shardIndex].getOriginalIndices(); + } + @Override public boolean isPartOfPointInTime(ShardSearchContextId contextId) { final PointInTimeBuilder pointInTimeBuilder = request.pointInTimeBuilder(); @@ -674,7 +684,7 @@ private void raisePhaseFailure(SearchPhaseExecutionException exception) { try { SearchShardTarget searchShardTarget = entry.getSearchShardTarget(); Transport.Connection connection = getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - sendReleaseSearchContext(entry.getContextId(), connection, searchShardTarget.getOriginalIndices()); + sendReleaseSearchContext(entry.getContextId(), connection, getOriginalIndices(entry.getShardIndex())); } catch (Exception inner) { inner.addSuppressed(exception); logger.trace("failed to release context", inner); @@ -723,9 +733,9 @@ public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shar AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); - ShardSearchRequest shardRequest = new ShardSearchRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), shardIndex, - getNumShards(), filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), - shardIt.getSearchContextId(), shardIt.getSearchContextKeepAlive()); + ShardSearchRequest shardRequest = new ShardSearchRequest(shardIt.getOriginalIndices(), request, + shardIt.shardId(), shardIndex, getNumShards(), filter, indexBoost, timeProvider.getAbsoluteStartMillis(), + shardIt.getClusterAlias(), shardIt.getSearchContextId(), shardIt.getSearchContextKeepAlive()); // if we already received a search result we can inform the shard that it // can return a null response if the request rewrites to match none rather // than creating an empty response in the search thread pool. diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index 09ae052bd1d5e..97726626abe55 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -148,8 +148,7 @@ protected void performPhaseOnShard(int shardIndex, SearchShardIterator shardIt, } CanMatchResponse result = new CanMatchResponse(canMatch, null); - result.setSearchShardTarget(shard == null ? new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias(), - shardIt.getOriginalIndices()) : shard); + result.setSearchShardTarget(shard == null ? 
new SearchShardTarget(null, shardIt.shardId(), shardIt.getClusterAlias()) : shard); result.setShardIndex(shardIndex); fork(() -> onShardResult(result, shardIt)); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index e6d095573f06d..50f26a9db1f93 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -64,13 +64,13 @@ public void run() throws IOException { searchResults.size(), () -> context.executeNextPhase(this, nextPhaseFactory.apply(queryResult)), context); for (final DfsSearchResult dfsResult : searchResults) { - final SearchShardTarget searchShardTarget = dfsResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - QuerySearchRequest querySearchRequest = new QuerySearchRequest(searchShardTarget.getOriginalIndices(), + final SearchShardTarget shardTarget = dfsResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); + QuerySearchRequest querySearchRequest = new QuerySearchRequest(context.getOriginalIndices(dfsResult.getShardIndex()), dfsResult.getContextId(), dfsResult.getShardSearchRequest(), dfs); final int shardIndex = dfsResult.getShardIndex(); searchTransportService.sendExecuteQuery(connection, querySearchRequest, context.getTask(), - new SearchActionListener(searchShardTarget, shardIndex) { + new SearchActionListener(shardTarget, shardIndex) { @Override protected void innerOnResponse(QuerySearchResult response) { @@ -86,15 +86,15 @@ public void onFailure(Exception exception) { try { context.getLogger().debug(() -> new ParameterizedMessage("[{}] Failed to execute query phase", querySearchRequest.contextId()), exception); - progressListener.notifyQueryFailure(shardIndex, searchShardTarget, exception); - counter.onFailure(shardIndex, searchShardTarget, exception); + progressListener.notifyQueryFailure(shardIndex, shardTarget, exception); + counter.onFailure(shardIndex, shardTarget, exception); } finally { if (context.isPartOfPointInTime(querySearchRequest.contextId()) == false) { // the query might not have been executed at all (for example because thread pool rejected // execution) and the search context that was created in dfs phase might not be released. 
// release it again to be in the safe side context.sendReleaseSearchContext( - querySearchRequest.contextId(), connection, searchShardTarget.getOriginalIndices()); + querySearchRequest.contextId(), connection, context.getOriginalIndices(shardIndex)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 0d73aa4e37d59..cbf8450b177cd 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -139,14 +139,13 @@ private void innerRun() throws Exception { // in any case we count down this result since we don't talk to this shard anymore counter.countDown(); } else { - SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), - searchShardTarget.getNodeId()); + SearchShardTarget shardTarget = queryResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getContextId(), i, entry, - lastEmittedDocPerShard, searchShardTarget.getOriginalIndices(), queryResult.getShardSearchRequest(), - queryResult.getRescoreDocIds()); - executeFetch(queryResult.getShardIndex(), searchShardTarget, counter, fetchSearchRequest, queryResult.queryResult(), - connection, fieldsOptionAdapter); + lastEmittedDocPerShard, context.getOriginalIndices(queryResult.getShardIndex()), + queryResult.getShardSearchRequest(), queryResult.getRescoreDocIds()); + executeFetch(queryResult.getShardIndex(), shardTarget, counter, fetchSearchRequest, + queryResult.queryResult(), connection, fieldsOptionAdapter); } } } @@ -206,9 +205,10 @@ private void releaseIrrelevantSearchContext(QuerySearchResult queryResult) { && context.getRequest().scroll() == null && (context.isPartOfPointInTime(queryResult.getContextId()) == false)) { try { - SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); - Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - context.sendReleaseSearchContext(queryResult.getContextId(), connection, searchShardTarget.getOriginalIndices()); + SearchShardTarget shardTarget = queryResult.getSearchShardTarget(); + Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId()); + context.sendReleaseSearchContext(queryResult.getContextId(), connection, + context.getOriginalIndices(queryResult.getShardIndex())); } catch (Exception e) { context.getLogger().trace("failed to release context", e); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index c0f2cf32aa6b6..5dc8e572cd8c1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -47,6 +47,11 @@ interface SearchPhaseContext extends Executor { */ SearchRequest getRequest(); + /** + * Returns the targeted {@link OriginalIndices} for the provided {@code shardIndex}. 
+ */ + OriginalIndices getOriginalIndices(int shardIndex); + /** * Checks if the given context id is part of the point in time of this search (if exists). * We should not release search contexts that belong to the point in time during or after searches. diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index a2b5a92d1d07f..49067379983f5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -141,7 +141,7 @@ protected void setSearchShardTarget(T response) { // we need this down the road for subseq. phases SearchShardTarget searchShardTarget = response.getSearchShardTarget(); response.setSearchShardTarget(new SearchShardTarget(searchShardTarget.getNodeId(), searchShardTarget.getShardId(), - target.getClusterAlias(), null)); + target.getClusterAlias())); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java index 63ae879a58904..a3fc006e6fc92 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java @@ -85,7 +85,7 @@ public String getClusterAlias() { SearchShardTarget nextOrNull() { final String nodeId = targetNodesIterator.nextOrNull(); if (nodeId != null) { - return new SearchShardTarget(nodeId, shardId, clusterAlias, originalIndices); + return new SearchShardTarget(nodeId, shardId, clusterAlias); } return null; } diff --git a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 4e8f3d37d7959..fc53708d69f34 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.core.Nullable; @@ -158,7 +157,7 @@ public static ShardSearchFailure fromXContent(XContentParser parser) throws IOEx SearchShardTarget searchShardTarget = null; if (nodeId != null) { searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias); } return new ShardSearchFailure(exception, searchShardTarget); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 94779f735111a..2d638aa72835f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -91,10 +91,10 @@ protected void doExecute(Task task, OpenPointInTimeRequest request, ActionListen searchRequest, "open_search_context", true, - (searchTask, shardTarget, connection, phaseListener) -> { + 
(searchTask, shardIt, connection, phaseListener) -> { final ShardOpenReaderRequest shardRequest = new ShardOpenReaderRequest( - shardTarget.getShardId(), - shardTarget.getOriginalIndices(), + shardIt.shardId(), + shardIt.getOriginalIndices(), request.keepAlive() ); transportService.sendChildRequest( diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 44d02eaf81214..f6ee2eda2ce87 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -82,6 +83,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.BiFunction; +import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.stream.Collectors; @@ -140,10 +142,43 @@ public TransportSearchAction(ThreadPool threadPool, this.executorSelector = executorSelector; } - private Map buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState, - Index[] concreteIndices, Map remoteAliasMap) { + private Map buildPerIndexOriginalIndices(ClusterState clusterState, + Set indicesAndAliases, + Index[] concreteIndices, + IndicesOptions indicesOptions) { + Map res = new HashMap<>(); + for (Index index : concreteIndices) { + clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); + + String[] aliases = indexNameExpressionResolver.indexAliases(clusterState, index.getName(), aliasMetadata -> true, + dataStreamAlias -> true, true, indicesAndAliases); + BooleanSupplier hasDataStreamRef = () -> { + IndexAbstraction ret = clusterState.getMetadata().getIndicesLookup().get(index.getName()); + if (ret == null || ret.getParentDataStream() == null) { + return false; + } + return indicesAndAliases.contains(ret.getParentDataStream().getName()); + }; + List finalIndices = new ArrayList<>(); + if (aliases == null + || aliases.length == 0 + || indicesAndAliases.contains(index.getName()) + || hasDataStreamRef.getAsBoolean()) { + finalIndices.add(index.getName()); + } + if (aliases != null) { + finalIndices.addAll(Arrays.asList(aliases)); + } + res.put(index.getUUID(), new OriginalIndices(finalIndices.toArray(new String[0]), indicesOptions)); + } + return Collections.unmodifiableMap(res); + } + + private Map buildPerIndexAliasFilter(ClusterState clusterState, + Set indicesAndAliases, + Index[] concreteIndices, + Map remoteAliasMap) { final Map aliasFilterMap = new HashMap<>(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, index.getName(), indicesAndAliases); @@ -225,7 +260,7 @@ protected void doExecute(Task task, SearchRequest searchRequest, ActionListener< } public 
interface SinglePhaseSearchAction { - void executeOnShardTarget(SearchTask searchTask, SearchShardTarget target, Transport.Connection connection, + void executeOnShardTarget(SearchTask searchTask, SearchShardIterator shardIt, Transport.Connection connection, ActionListener listener); } @@ -248,7 +283,7 @@ public AbstractSearchAsyncAction asyncSearchAction( protected void executePhaseOnShard(SearchShardIterator shardIt, SearchShardTarget shard, SearchActionListener listener) { final Transport.Connection connection = getConnection(shard.getClusterAlias(), shard.getNodeId()); - phaseSearchAction.executeOnShardTarget(task, shard, connection, listener); + phaseSearchAction.executeOnShardTarget(task, shardIt, connection, listener); } @Override @@ -593,8 +628,11 @@ static List getRemoteShardsIteratorFromPointInTime(Map localShardRoutings = clusterService.operationRouting().searchShards(clusterState, concreteLocalIndices, routingMap, searchRequest.preference(), searchService.getResponseCollectorService(), nodeSearchCounts); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + aliasFilter = buildPerIndexAliasFilter(clusterState, indicesAndAliases, indices, remoteAliasMap); + final Map finalIndicesMap = + buildPerIndexOriginalIndices(clusterState, indicesAndAliases, indices, searchRequest.indicesOptions()); localShardIterators = StreamSupport.stream(localShardRoutings.spliterator(), false) - .map(it -> new SearchShardIterator( - searchRequest.getLocalClusterAlias(), it.shardId(), it.getShardRoutings(), localIndices)) + .map(it -> { + OriginalIndices finalIndices = finalIndicesMap.get(it.shardId().getIndex().getUUID()); + assert finalIndices != null; + return new SearchShardIterator(searchRequest.getLocalClusterAlias(), it.shardId(), it.getShardRoutings(), finalIndices); + }) .collect(Collectors.toList()); - aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); } final GroupShardsIterator shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators); @@ -958,7 +1002,9 @@ static List getLocalLocalShardsIteratorFromPointInTime(Clus } } } - iterators.add(new SearchShardIterator(localClusterAlias, shardId, targetNodes, originalIndices, + OriginalIndices finalIndices = new OriginalIndices(new String[] { shardId.getIndexName() }, + originalIndices.indicesOptions()); + iterators.add(new SearchShardIterator(localClusterAlias, shardId, targetNodes, finalIndices, perNode.getSearchContextId(), keepAlive)); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java index c2d7691689f28..dbdc12b1084e2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAlias.java @@ -143,6 +143,10 @@ public CompressedXContent getFilter() { return filter; } + public boolean filteringRequired() { + return filter != null; + } + /** * Returns a new {@link DataStreamAlias} instance with the provided data stream name added to it as a new member. * If the provided isWriteDataStream is set to true then the provided data stream is also set as write data stream. 
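The resolver change below threads a second predicate through indexAliases so data stream aliases are matched with their own test (DataStreamAlias::filteringRequired when building filtering aliases, dataStreamAlias -> true when collecting every alias) instead of the previous hard-coded getFilter() != null check; the same two call styles appear above in TransportClusterSearchShardsAction and TransportSearchAction.buildPerIndexOriginalIndices. A self-contained sketch of that predicate-splitting pattern, with hypothetical stand-in types rather than the real AliasMetadata, DataStreamAlias, or ClusterState:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

// Hypothetical, simplified stand-ins for AliasMetadata and DataStreamAlias.
record IndexAlias(String name, boolean filteringRequired) {}
record StreamAlias(String name, boolean filteringRequired) {}

public class AliasSelection {
    // Mirrors the reworked indexAliases(...): one predicate per alias kind instead of a
    // hard-coded "filter != null" check for data stream aliases.
    static String[] indexAliases(List<IndexAlias> aliases,
                                 List<StreamAlias> dataStreamAliases,
                                 Predicate<IndexAlias> requiredAlias,
                                 Predicate<StreamAlias> requiredDataStreamAlias) {
        List<String> names = new ArrayList<>();
        for (IndexAlias alias : aliases) {
            if (requiredAlias.test(alias)) {
                names.add(alias.name());
            }
        }
        for (StreamAlias alias : dataStreamAliases) {
            if (requiredDataStreamAlias.test(alias)) {
                names.add(alias.name());
            }
        }
        return names.toArray(new String[0]);
    }

    public static void main(String[] args) {
        List<IndexAlias> aliases = List.of(new IndexAlias("filtered-alias", true), new IndexAlias("plain-alias", false));
        List<StreamAlias> streamAliases = List.of(new StreamAlias("ds-alias", false));

        // filteringAliases-style call: keep only aliases that actually carry a filter.
        String[] filtering = indexAliases(aliases, streamAliases,
            IndexAlias::filteringRequired, StreamAlias::filteringRequired);        // ["filtered-alias"]

        // buildPerIndexOriginalIndices-style call: every alias that points at the index.
        String[] all = indexAliases(aliases, streamAliases, a -> true, a -> true); // all three names

        System.out.println(String.join(",", filtering));
        System.out.println(String.join(",", all));
    }
}
```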
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 679e283569c2a..285b8aa5983a8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -501,7 +501,8 @@ public Set resolveExpressions(ClusterState state, String... expressions) * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. */ public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { - return indexAliases(state, index, AliasMetadata::filteringRequired, false, resolvedExpressions); + return indexAliases(state, index, + AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } /** @@ -520,8 +521,12 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) { * the index itself - null is returned. Returns {@code null} if no filtering is required. *

NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}. */ - public String[] indexAliases(ClusterState state, String index, Predicate requiredAlias, boolean skipIdentity, - Set resolvedExpressions) { + public String[] indexAliases(ClusterState state, + String index, + Predicate requiredAlias, + Predicate requiredDataStreamAlias, + boolean skipIdentity, + Set resolvedExpressions) { if (isAllIndices(resolvedExpressions)) { return null; } @@ -548,7 +553,7 @@ public String[] indexAliases(ClusterState state, String index, Predicate dataStreamAlias.getDataStreams().contains(dataStream.getName())) - .filter(dataStreamAlias -> dataStreamAlias.getFilter() != null) + .filter(requiredDataStreamAlias) .map(DataStreamAlias::getName) .toArray(String[]::new); } else { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 90040abac100d..54a0f5dcd8cd1 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -1528,7 +1528,7 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set filterParser = bytes -> { try (InputStream inputStream = bytes.streamInput(); XContentParser parser = XContentFactory.xContentType(inputStream).xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, inputStream)) { + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, inputStream)) { return parseInnerQueryBuilder(parser); } }; diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 966f19feefc2e..ea0d42d611ad3 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.ParsingException; @@ -810,7 +809,7 @@ public static SearchHit createFromMap(Map values) { String nodeId = get(Fields._NODE, values, null); if (shardId != null && nodeId != null) { assert shardId.getIndexName().equals(index); - searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE)); + searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias)); } else { //these fields get set anyways when setting the shard target, //but we set them explicitly when we don't have enough info to rebuild the shard target diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 25f535c8b9aa3..20a2cba540fcc 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -18,7 +18,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.action.search.SearchType; @@ -822,7 +821,7 @@ private DefaultSearchContext 
createSearchContext(ReaderContext reader, ShardSear DefaultSearchContext searchContext = null; try { SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), - reader.indexShard().shardId(), request.getClusterAlias(), OriginalIndices.NONE); + reader.indexShard().shardId(), request.getClusterAlias()); searchContext = new DefaultSearchContext(reader, request, shardTarget, threadPool::relativeTimeInMillis, timeout, fetchPhase, lowLevelCancellation, clusterService.state().nodes().getMinNodeVersion()); diff --git a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 98e1cc97f6d46..974829c985dd2 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -8,7 +8,6 @@ package org.elasticsearch.search; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.core.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -27,9 +26,6 @@ public final class SearchShardTarget implements Writeable, Comparable randomExceptions() { actual = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{ new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)) + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)) }); expected = new ElasticsearchException("Elasticsearch exception [type=search_phase_execution_exception, " + "reason=all shards failed]"); diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 4a9e9b5f4db42..60e0c98e9688b 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.LockObtainFailedException; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -277,7 +276,7 @@ public void testQueryShardException() throws IOException { } public void testSearchException() throws IOException { - SearchShardTarget target = new SearchShardTarget("foo", new ShardId("bar", "_na_", 1), null, OriginalIndices.NONE); + SearchShardTarget target = new SearchShardTarget("foo", new ShardId("bar", "_na_", 1), null); SearchException ex = serialize(new SearchException(target, "hello world")); assertEquals(target, ex.shard()); assertEquals(ex.getMessage(), "hello world"); diff --git a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java index 0e47493150c95..212117e8d41c9 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java @@ -11,7 +11,6 @@ import com.fasterxml.jackson.core.JsonParseException; import org.apache.commons.codec.DecoderException; import org.apache.lucene.index.CorruptIndexException; -import 
org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -123,7 +122,7 @@ private static ShardSearchFailure createShardFailureParsingException(String erro private static SearchShardTarget createSearchShardTarget(String nodeId, int shardId, String index, String clusterAlias) { return new SearchShardTarget(nodeId, - new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias); } public void testGroupByNullTarget() { diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 51d1d9b2637c7..6c1e26c11dd05 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -72,7 +72,7 @@ private AbstractSearchAsyncAction createAction(SearchRequest resolvedNodes.add(Tuple.tuple(cluster, node)); return null; }; - + OriginalIndices originalIndices = new OriginalIndices(request.indices(), request.indicesOptions()); return new AbstractSearchAsyncAction("test", logger, null, nodeIdToConnection, Collections.singletonMap("foo", new AliasFilter(new MatchAllQueryBuilder())), Collections.singletonMap("foo", 2.0f), null, request, listener, @@ -104,6 +104,11 @@ public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.C OriginalIndices originalIndices) { releasedContexts.add(contextId); } + + @Override + public OriginalIndices getOriginalIndices(int shardIndex) { + return originalIndices; + } }; } @@ -159,7 +164,7 @@ public void testSendSearchResponseDisallowPartialFailures() { ShardId failureShardId = new ShardId("index", "index-uuid", i); String failureClusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10); String failureNodeId = randomAlphaOfLengthBetween(5, 10); - action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias, OriginalIndices.NONE), + action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias), new IllegalArgumentException()); } action.sendSearchResponse(InternalSearchResponse.empty(), phaseResults.results); @@ -235,7 +240,7 @@ private static ArraySearchPhaseResults phaseResults(Set {}); } diff --git a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java index 86d69d7be527d..ee92f3da3422e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java @@ -86,13 +86,13 @@ public void testClearScrollIds() throws IOException, InterruptedException { AtomicArray array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 1), node1); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 12), node2); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 42), node3); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); @@ -150,13 +150,13 @@ public void testClearScrollIdsWithFailure() throws IOException, InterruptedExcep AtomicArray array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 1), node1); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 12), node2); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new 
SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 42), node3); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); diff --git a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java index a46fb722d1733..ed010c5c146de 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.action.search; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; @@ -54,13 +53,13 @@ public void testCollect() throws InterruptedException { new ShardSearchContextId(UUIDs.randomBase64UUID(), shardID), null, null); dfsSearchResult.setShardIndex(shardID); dfsSearchResult.setSearchShardTarget(new SearchShardTarget("foo", - new ShardId("bar", "baz", shardID), null, OriginalIndices.NONE)); + new ShardId("bar", "baz", shardID), null)); collector.onResult(dfsSearchResult);}); break; case 2: state.add(2); executor.execute(() -> collector.onFailure(shardID, new SearchShardTarget("foo", new ShardId("bar", "baz", shardID), - null, OriginalIndices.NONE), new RuntimeException("boom"))); + null), new RuntimeException("boom"))); break; default: fail("unknown state"); diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index fd5e8a424b273..cab54fdc78a1e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; @@ -47,9 +46,9 @@ public void testDfsWith2Shards() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null))); results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -59,7 +58,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest SearchActionListener listener) { if (request.contextId().getId() == 1) { QuerySearchResult 
queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -67,7 +66,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest listener.onResponse(queryResult); } else if (request.contextId().getId() == 2) { QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -111,9 +110,9 @@ public void testDfsWith1ShardFailed() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null))); results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -124,7 +123,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest if (request.contextId().getId() == 1) { QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), new SearchShardTarget("node1", new ShardId("test", "na", 0), - null, OriginalIndices.NONE), null); + null), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs( new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -173,9 +172,9 @@ public void testFailPhaseOnException() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null))); results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + new SearchShardTarget("node2", new ShardId("test", "na", 0), null))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -185,7 +184,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest SearchActionListener listener) { if (request.contextId().getId() == 1) { QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("test", "na", 0), 
null), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 9f8f3b8762832..3323f127bbe63 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; @@ -55,7 +54,7 @@ public void testShortcutQueryAndFetchOptimization() { if (hasHits) { QuerySearchResult queryResult = new QuerySearchResult(); queryResult.setSearchShardTarget(new SearchShardTarget("node0", - new ShardId("index", "index", 0), null, OriginalIndices.NONE)); + new ShardId("index", "index", 0), null)); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]); addProfiling(profiled, queryResult); @@ -113,7 +112,7 @@ public void testFetchTwoDocument() { boolean profiled = randomBoolean(); ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -123,7 +122,7 @@ public void testFetchTwoDocument() { results.consumeResult(queryResult, () -> {}); final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(ctx2, shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -181,7 +180,7 @@ public void testFailFetchOneDoc() { boolean profiled = randomBoolean(); final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -190,7 +189,7 @@ public void testFailFetchOneDoc() { addProfiling(profiled, queryResult); 
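
The hunks in this file, and in the test classes that follow, all make the same mechanical change: a SearchShardTarget is now built from just the node id, shard id and cluster alias, and the trailing OriginalIndices argument is dropped. A minimal sketch of the before/after shape, assuming (as the MockSearchPhaseContext hunk a few files below suggests) that the phase context now exposes a per-shard getOriginalIndices accessor for callers that still need the original indices; the class and method names in the sketch are illustrative, not part of this patch:

import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.MockSearchPhaseContext;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchShardTarget;

class SearchShardTargetSketch {
    // Old shape removed by this patch:
    //   new SearchShardTarget("node1", shardId, clusterAlias, OriginalIndices.NONE)
    static SearchShardTarget newTarget(ShardId shardId, String clusterAlias) {
        // The target now carries only the node id, the shard id and the cluster alias.
        return new SearchShardTarget("node1", shardId, clusterAlias);
    }

    // Where the original indices are still needed, they are resolved per shard from the
    // phase context rather than stored on every target (hypothetical caller, shown here
    // against the test mock that appears later in this diff).
    static OriginalIndices resolve(MockSearchPhaseContext context, int shardIndex) {
        return context.getOriginalIndices(shardIndex);
    }
}

This mirrors the MockSearchPhaseContext#getOriginalIndices(int) override added later in this diff, which rebuilds OriginalIndices from the search request's indices and indices options.
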
results.consumeResult(queryResult, () -> {}); - SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -267,7 +266,7 @@ public void testFetchDocsConcurrently() throws InterruptedException { mockSearchPhaseContext.getRequest(), numHits, exc -> {}); SearchShardTarget[] shardTargets = new SearchShardTarget[numHits]; for (int i = 0; i < numHits; i++) { - shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null, OriginalIndices.NONE); + shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i), shardTargets[i], null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]); @@ -346,7 +345,7 @@ public void testExceptionFailsPhase() { int resultSetSize = randomIntBetween(2, 10); boolean profiled = randomBoolean(); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -355,7 +354,7 @@ public void testExceptionFailsPhase() { addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); - SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -411,7 +410,7 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s boolean profiled = randomBoolean(); final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); - SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null); QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -421,7 +420,7 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s results.consumeResult(queryResult, () -> {}); final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - SearchShardTarget shard2Target = new 
SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null); queryResult = new QuerySearchResult(ctx2, shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index e70eacbc1f651..b612e1bbbb2c3 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -77,6 +77,11 @@ public SearchRequest getRequest() { return searchRequest; } + @Override + public OriginalIndices getOriginalIndices(int shardIndex) { + return new OriginalIndices(searchRequest.indices(), searchRequest.indicesOptions()); + } + @Override public void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray queryResults) { String scrollId = getRequest().scroll() != null ? TransportSearchHelper.buildScrollId(queryResults, Version.CURRENT) : null; diff --git a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java index 833987fedf3a5..1b0f0ae1fa9fd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; @@ -94,7 +93,7 @@ public void testProgressListenerExceptionsAreCaught() throws Exception { for (int i = 0; i < 10; i++) { SearchShardTarget searchShardTarget = new SearchShardTarget("node", new ShardId("index", "uuid", i), - null, OriginalIndices.NONE); + null); QuerySearchResult querySearchResult = new QuerySearchResult(); TopDocs topDocs = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]); querySearchResult.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 8c1b0d859f9f6..bdb43d699998c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -19,7 +19,6 @@ import org.apache.lucene.search.TotalHits.Relation; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -321,7 +320,7 @@ private static AtomicArray generateQueryResults( for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { String clusterAlias = 
randomBoolean() ? null : "remote"; SearchShardTarget searchShardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), - clusterAlias, OriginalIndices.NONE); + clusterAlias); QuerySearchResult querySearchResult = new QuerySearchResult(new ShardSearchContextId("", shardIndex), searchShardTarget, null); final TopDocs topDocs; float maxScore = 0; @@ -477,13 +476,13 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { QuerySearchResult empty = QuerySearchResult.nullInstance(); int shardId = 2 + numEmptyResponses; empty.setShardIndex(2+numEmptyResponses); - empty.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null, OriginalIndices.NONE)); + empty.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null)); consumer.consumeResult(empty, latch::countDown); numEmptyResponses --; } QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", 0), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", 0), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); InternalAggregations aggs = InternalAggregations.from(singletonList(new InternalMax("test", 1.0D, DocValueFormat.RAW, emptyMap()))); @@ -492,7 +491,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { consumer.consumeResult(result, latch::countDown); result = new QuerySearchResult(new ShardSearchContextId("", 1), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", 0), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); aggs = InternalAggregations.from(singletonList(new InternalMax("test", 3.0D, DocValueFormat.RAW, emptyMap()))); @@ -501,7 +500,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { consumer.consumeResult(result, latch::countDown); result = new QuerySearchResult(new ShardSearchContextId("", 1), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", 0), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); aggs = InternalAggregations.from(singletonList(new InternalMax("test", 2.0D, DocValueFormat.RAW, emptyMap()))); @@ -513,7 +512,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { result = QuerySearchResult.nullInstance(); int shardId = 2 + numEmptyResponses; result.setShardIndex(shardId); - result.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null, OriginalIndices.NONE)); + result.setSearchShardTarget(new SearchShardTarget("node", new ShardId("a", "b", shardId), null)); consumer.consumeResult(result, latch::countDown); numEmptyResponses--; @@ -567,7 +566,7 @@ public void testConsumerConcurrently() throws Exception { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", id), - new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE), null); + new 
SearchShardTarget("node", new ShardId("a", "b", id), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); @@ -615,7 +614,7 @@ public void testConsumerOnlyAggs() throws Exception { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), new DocValueFormat[0]); InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new InternalMax("test", (double) number, @@ -657,7 +656,7 @@ public void testConsumerOnlyHits() throws Exception { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); result.setShardIndex(i); @@ -699,7 +698,7 @@ public void testReduceTopNWithFromOffset() throws Exception { CountDownLatch latch = new CountDownLatch(4); for (int i = 0; i < 4; i++) { QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); ScoreDoc[] docs = new ScoreDoc[3]; for (int j = 0; j < docs.length; j++) { docs[j] = new ScoreDoc(0, score--); @@ -744,7 +743,7 @@ public void testConsumerSortByField() throws Exception { FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, new Object[]{number})}; TopDocs topDocs = new TopFieldDocs(new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); result.setShardIndex(i); result.size(size); @@ -785,7 +784,7 @@ public void testConsumerFieldCollapsing() throws Exception { FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, values)}; TopDocs topDocs = new CollapseTopFieldDocs("field", new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields, values); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); result.setShardIndex(i); result.size(size); @@ -821,7 +820,7 @@ public void testConsumerSuggestions() throws Exception { CountDownLatch latch = new CountDownLatch(expectedNumResults); for (int i = 0; i < expectedNumResults; i++) { QuerySearchResult result = new QuerySearchResult(new 
ShardSearchContextId("", i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", i), null), null); List>> suggestions = new ArrayList<>(); { @@ -950,7 +949,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", id), - new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE), null); + new SearchShardTarget("node", new ShardId("a", "b", id), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[]{new ScoreDoc(0, number)}), number), new DocValueFormat[0]); @@ -1016,7 +1015,7 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t final int index = i; threads[index] = new Thread(() -> { QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), index), - new SearchShardTarget("node", new ShardId("a", "b", index), null, OriginalIndices.NONE), + new SearchShardTarget("node", new ShardId("a", "b", index), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN), @@ -1066,7 +1065,7 @@ public void testFailConsumeAggs() throws Exception { for (int i = 0; i < expectedNumResults; i++) { final int index = i; QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), index), - new SearchShardTarget("node", new ShardId("a", "b", index), null, OriginalIndices.NONE), + new SearchShardTarget("node", new ShardId("a", "b", index), null), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN), diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java index 4b4f482406d2a..454a2ee664605 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseExecutionExceptionTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -38,11 +37,11 @@ public void testToXContent() throws IOException { SearchPhaseExecutionException exception = new SearchPhaseExecutionException("test", "all shards failed", new ShardSearchFailure[]{ new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 0), null, OriginalIndices.NONE)), + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 0), null)), new ShardSearchFailure(new IndexShardClosedException(new ShardId("foo", "_na_", 1)), - new SearchShardTarget("node_2", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)), + new SearchShardTarget("node_2", new ShardId("foo", "_na_", 1), null)), new ShardSearchFailure(new ParsingException(5, 7, "foobar", null), - new SearchShardTarget("node_3", new ShardId("foo", "_na_", 2), null, 
OriginalIndices.NONE)), + new SearchShardTarget("node_3", new ShardId("foo", "_na_", 2), null)), }); // Failures are grouped (by default) @@ -94,7 +93,7 @@ public void testToAndFromXContent() throws IOException { new NullPointerException() ); shardSearchFailures[i] = new ShardSearchFailure(cause, new SearchShardTarget("node_" + i, - new ShardId("test", "_na_", i), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", i), null)); } final String phase = randomFrom("query", "search", "other"); @@ -143,7 +142,7 @@ public void testPhaseFailureWithSearchShardFailure() { new InvalidIndexTemplateException("foo", "bar") ); shardSearchFailures[i] = new ShardSearchFailure(cause, new SearchShardTarget("node_" + i, - new ShardId("test", "_na_", i), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", i), null)); } final String phase = randomFrom("fetch", "search", "other"); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index ab7f3f1e68638..a2c170330a056 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -100,7 +100,7 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest numWithTopDocs.incrementAndGet(); } QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); if (withCollapse) { queryResult.topDocs(new TopDocsAndMaxScore( @@ -313,7 +313,7 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest SearchTask task, SearchActionListener listener) { int shardId = request.shardId().id(); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); if (shardId == 0) { queryResult.topDocs(new TopDocsAndMaxScore(new TopFieldDocs( @@ -412,7 +412,7 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest SearchTask task, SearchActionListener listener) { int shardId = request.shardId().id(); QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); if (shardId == 0) { queryResult.topDocs(new TopDocsAndMaxScore(new TopFieldDocs( @@ -471,7 +471,7 @@ public void run() { assertThat(phase.totalHits.value, equalTo(2L)); assertThat(phase.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); - SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null, OriginalIndices.NONE); + SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); SearchActionListener listener = 
new SearchActionListener(searchShardTarget, 0) { @Override public void onFailure(Exception e) { } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index b6d9e40878f77..7663525372847 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.common.text.Text; import org.elasticsearch.core.Tuple; @@ -121,7 +120,7 @@ public void testMergeShardFailures() throws InterruptedException { ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; for (int j = 0; j < numFailures; j++) { ShardId shardId = new ShardId(randomFrom(indices), j); - SearchShardTarget searchShardTarget = new SearchShardTarget(randomAlphaOfLength(6), shardId, clusterAlias, null); + SearchShardTarget searchShardTarget = new SearchShardTarget(randomAlphaOfLength(6), shardId, clusterAlias); ShardSearchFailure failure = new ShardSearchFailure(new IllegalArgumentException(), searchShardTarget); shardSearchFailures[j] = failure; priorityQueue.add(Tuple.tuple(searchShardTarget, failure)); @@ -252,7 +251,7 @@ public void testMergeCompletionSuggestions() throws InterruptedException { ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomIntBetween(0, Integer.MAX_VALUE)); String clusterAlias = randomBoolean() ? "" : randomAlphaOfLengthBetween(5, 10); - hit.shard(new SearchShardTarget("node", shardId, clusterAlias, OriginalIndices.NONE)); + hit.shard(new SearchShardTarget("node", shardId, clusterAlias)); option.setHit(hit); options.addOption(option); completionSuggestion.addTerm(options); @@ -300,7 +299,7 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedExceptio Collections.emptyMap()); SearchHit searchHit = new SearchHit(docId); searchHit.shard(new SearchShardTarget("node", new ShardId("index", "uuid", randomIntBetween(0, Integer.MAX_VALUE)), - randomBoolean() ? RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY : randomAlphaOfLengthBetween(5, 10), OriginalIndices.NONE)); + randomBoolean() ? 
RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY : randomAlphaOfLengthBetween(5, 10))); option.setHit(searchHit); options.addOption(option); completionSuggestion.addTerm(options); @@ -664,7 +663,7 @@ private static SearchHit[] randomSearchHitArray(int numDocs, int numResponses, S for (int j = 0; j < numDocs; j++) { ShardId shardId = new ShardId(randomFrom(indices), randomIntBetween(0, 10)); SearchShardTarget shardTarget = new SearchShardTarget(randomAlphaOfLengthBetween(3, 8), shardId, - clusterAlias, OriginalIndices.NONE); + clusterAlias); SearchHit hit = new SearchHit(randomIntBetween(0, Integer.MAX_VALUE)); float score = Float.NaN; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java index d2dd66ba774f7..3fbdcf58f92df 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.UUIDs; @@ -63,7 +62,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); }).start(); } @@ -154,7 +153,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); }).start(); } @@ -227,7 +226,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); }).start(); } @@ -304,7 +303,7 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode()); testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), - new ShardId("test", "_na_", 1), null, OriginalIndices.NONE)); + new ShardId("test", "_na_", 1), null)); searchActionListener.onResponse(testSearchPhaseResult); } }).start(); diff --git 
a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java index 2c3435cc80457..f602d0b460304 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchShardIteratorTests.java @@ -64,7 +64,6 @@ public void testNewSearchShardTarget() { assertEquals(clusterAlias, searchShardTarget.getClusterAlias()); assertSame(shardId, searchShardTarget.getShardId()); assertEquals(nodeId, searchShardTarget.getNodeId()); - assertSame(originalIndices, searchShardTarget.getOriginalIndices()); } public void testEqualsAndHashcode() { diff --git a/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java b/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java index 724b158a0e0d4..c84687a7affe5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.search; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; @@ -37,7 +36,7 @@ public static ShardSearchFailure createTestItem(String indexUuid) { String indexName = randomAlphaOfLengthBetween(5, 10); String clusterAlias = randomBoolean() ? randomAlphaOfLengthBetween(5, 10) : null; searchShardTarget = new SearchShardTarget(nodeId, - new ShardId(new Index(indexName, indexUuid), randomInt()), clusterAlias, OriginalIndices.NONE); + new ShardId(new Index(indexName, indexUuid), randomInt()), clusterAlias); } return new ShardSearchFailure(ex, searchShardTarget); } @@ -91,7 +90,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws public void testToXContent() throws IOException { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(0, 0, "some message", null), - new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), null, OriginalIndices.NONE)); + new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), null)); BytesReference xContent = toXContent(failure, XContentType.JSON, randomBoolean()); assertEquals( "{\"shard\":123," @@ -109,7 +108,7 @@ public void testToXContent() throws IOException { public void testToXContentWithClusterAlias() throws IOException { ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(0, 0, "some message", null), - new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), "cluster1", OriginalIndices.NONE)); + new SearchShardTarget("nodeId", new ShardId(new Index("indexName", "indexUuid"), 123), "cluster1")); BytesReference xContent = toXContent(failure, XContentType.JSON, randomBoolean()); assertEquals( "{\"shard\":123," diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index bfb0e0865c906..c76ef34298f9b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -974,6 +974,7 @@ public void testLocalShardIteratorFromPointInTime() { final 
IndexMetadata indexMetadata = clusterState.metadata().index("test-1"); Map contexts = new HashMap<>(); Set relocatedContexts = new HashSet<>(); + Map aliasFilterMap = new HashMap<>(); for (int shardId = 0; shardId < numberOfShards; shardId++) { final String targetNode; if (randomBoolean()) { @@ -987,13 +988,15 @@ public void testLocalShardIteratorFromPointInTime() { contexts.put(new ShardId(indexMetadata.getIndex(), shardId), new SearchContextIdForNode(null, targetNode, new ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong(), null))); + aliasFilterMap.putIfAbsent(indexMetadata.getIndexUUID(), AliasFilter.EMPTY); } TimeValue keepAlive = randomBoolean() ? null : TimeValue.timeValueSeconds(between(30, 3600)); + final List shardIterators = TransportSearchAction.getLocalLocalShardsIteratorFromPointInTime( clusterState, OriginalIndices.NONE, null, - new SearchContextId(contexts, Collections.emptyMap()), + new SearchContextId(contexts, aliasFilterMap), keepAlive ); shardIterators.sort(Comparator.comparing(SearchShardIterator::shardId)); diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java index a694d22baec36..896c35983bd11 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java @@ -28,13 +28,13 @@ public static AtomicArray generateQueryResults() { DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("a", 1), node1); - testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), "cluster_x", null)); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), "cluster_x")); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("b", 12), node2); - testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), "cluster_y", null)); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), "cluster_y")); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("c", 42), node3); - testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index ef2dafc398065..cfea739cf9917 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -1423,11 +1423,11 @@ public void testIndexAliases() { ClusterState state = 
ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); - String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, true, resolvedExpressions); + String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias-0", "test-alias-1", "test-alias-non-filtering"}, strings); - strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> x.alias().equals("test-alias-1"), true, + strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> x.alias().equals("test-alias-1"), x -> false, true, resolvedExpressions); assertArrayEquals(null, strings); } @@ -1452,21 +1452,21 @@ public void testIndexAliasesDataStreamAliases() { // Only resolve aliases with with that refer to dataStreamName1 Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); - String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, true, resolvedExpressions); - assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs")); + String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); + assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar")); } { // Only resolve aliases with with that refer to dataStreamName2 Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); - String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, true, resolvedExpressions); - assertThat(result, arrayContainingInAnyOrder("logs_baz")); + String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); + assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2")); } { // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); - String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, false, resolvedExpressions); + String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, false, resolvedExpressions); assertThat(result, nullValue()); } } @@ -1480,15 +1480,15 @@ public void testIndexAliasesSkipIdentity() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); - String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, false, resolvedExpressions); + String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); - aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, true, resolvedExpressions); + aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] {"test-alias"}, aliases); resolvedExpressions = Collections.singleton("other-alias"); - aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> 
true, false, resolvedExpressions); + aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertArrayEquals(new String[] {"other-alias"}, aliases); - aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, true, resolvedExpressions); + aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] {"other-alias"}, aliases); } @@ -1499,7 +1499,7 @@ public void testConcreteWriteIndexSuccessful() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); IndicesRequest request = new IndicesRequest() { @@ -1570,7 +1570,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); IndicesRequest request = new IndicesRequest() { @@ -1603,7 +1603,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(false))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), @@ -1623,7 +1623,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(false, null)))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings = indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); DocWriteRequest request = randomFrom(new IndexRequest("test-alias"), @@ -1644,7 +1644,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { .putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(test0WriteIndex == false, null)))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); String[] strings 
= indexNameExpressionResolver - .indexAliases(state, "test-0", x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); + .indexAliases(state, "test-0", x -> true, x -> true, true, new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))); Arrays.sort(strings); assertArrayEquals(new String[] {"test-alias"}, strings); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, @@ -2274,7 +2274,7 @@ public void testDataStreamsWithWildcardExpression() { assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 1, epochMillis))); assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis))); assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis))); - assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis)));; + assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis))); } { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; diff --git a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index f9a0f5deed788..556db096885f0 100644 --- a/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; @@ -139,9 +138,9 @@ public void testConvert() throws IOException { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)); + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), - new SearchShardTarget("node_1", new ShardId("foo", "_na_", 2), null, OriginalIndices.NONE)); + new SearchShardTarget("node_1", new ShardId("foo", "_na_", 2), null)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java index 5595dab0a3628..26d71adebdc79 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.rest.action; import com.fasterxml.jackson.core.io.JsonEOFException; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -188,7 
+187,7 @@ private static ShardSearchFailure createShardFailureParsingException(String node
     private static SearchShardTarget createSearchShardTarget(String nodeId, int shardId, String index, String clusterAlias) {
         return new SearchShardTarget(nodeId,
-            new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE);
+            new ShardId(new Index(index, IndexMetadata.INDEX_UUID_NA_VALUE), shardId), clusterAlias);
     }
     @Override
diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
index 07ebb51243174..1a957b140d3d8 100644
--- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
+++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
@@ -17,7 +17,6 @@ import org.apache.lucene.search.Sort;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.UUIDs;
@@ -129,7 +128,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
             }
         };
-        SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE);
+        SearchShardTarget target = new SearchShardTarget("node", shardId, null);
         ReaderContext readerWithoutScroll = new ReaderContext(
             newContextId(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false);
@@ -277,7 +276,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) {
                 return searcher;
             }
         };
-        SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE);
+        SearchShardTarget target = new SearchShardTarget("node", shardId, null);
         ReaderContext readerContext = new ReaderContext(
             newContextId(), indexService, indexShard, searcherSupplier, randomNonNegativeLong(), false);
diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
index acb89ef090cd3..753be2e9deac8 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java
@@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -130,7 +129,7 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp
             String index = randomAlphaOfLengthBetween(5, 10);
             String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
             hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10),
-                new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias, OriginalIndices.NONE));
+                new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias));
         }
         return hit;
     }
@@ -217,7 +216,7 @@ public void testToXContent() throws IOException {
     public void testSerializeShardTarget() throws Exception {
         String clusterAlias = randomBoolean() ? null : "cluster_alias";
         SearchShardTarget target = new SearchShardTarget("_node_id", new ShardId(new Index("_index", "_na_"), 0),
-            clusterAlias, OriginalIndices.NONE);
+            clusterAlias);
         Map<String, SearchHits> innerHits = new HashMap<>();
         SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
index 4d0d621e2fb43..c36712e68c0db 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
@@ -11,7 +11,6 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -220,7 +219,7 @@ public void testFromXContentWithShards() throws IOException {
                 String index = randomAlphaOfLengthBetween(5, 10);
                 String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
                 final SearchShardTarget shardTarget = new SearchShardTarget(randomAlphaOfLengthBetween(5, 10),
-                    new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias, OriginalIndices.NONE);
+                    new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias);
                 if (withExplanation) {
                     hit.explanation(SearchHitTests.createExplanation(randomIntBetween(0, 5)));
                 }
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
index dda8111af2015..80468f9fea196 100644
--- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
@@ -104,6 +104,6 @@ private static FetchSearchResult fetchResult(SearchShardTarget target, ProfileRe
     }
     private static SearchShardTarget randomTarget() {
-        return new SearchShardTarget(randomAlphaOfLength(5), new ShardId(randomAlphaOfLength(5), "uuid", randomInt(6)), null, null);
+        return new SearchShardTarget(randomAlphaOfLength(5), new ShardId(randomAlphaOfLength(5), "uuid", randomInt(6)), null);
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java
index df79662e304be..ff0ec52454db8 100644
--- a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java
+++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java
@@ -12,7 +12,6 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.OriginalIndicesTests;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.common.Strings;
@@ -57,7 +56,7 @@ private static QuerySearchResult createTestInstance() throws Exception {
         ShardSearchRequest shardSearchRequest = new ShardSearchRequest(OriginalIndicesTests.randomOriginalIndices(), searchRequest,
             shardId, 0, 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, randomNonNegativeLong(), null);
         QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.base64UUID(), randomLong()),
-            new SearchShardTarget("node", shardId, null, OriginalIndices.NONE), shardSearchRequest);
+            new SearchShardTarget("node", shardId, null), shardSearchRequest);
         if (randomBoolean()) {
             result.terminatedEarly(randomBoolean());
         }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
index 287de3d73e905..c27ebba81391d 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java
@@ -10,7 +10,6 @@ import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchShardTask;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.core.TimeValue;
@@ -55,7 +54,7 @@ public class TestSearchContext extends SearchContext {
     public static final SearchShardTarget SHARD_TARGET =
-        new SearchShardTarget("test", new ShardId("test", "test", 0), null, OriginalIndices.NONE);
+        new SearchShardTarget("test", new ShardId("test", "test", 0), null);
     final IndexService indexService;
     final BitsetFilterCache fixedBitSetFilterCache;
diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java
index 36b5cfd84da5c..d52b0f465710f 100644
--- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java
+++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java
@@ -9,7 +9,6 @@ import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
@@ -246,7 +245,7 @@ public void testWithFetchFailures() throws InterruptedException {
             IOException failure = new IOException("boum");
             //fetch failures are currently ignored, they come back with onFailure or onResponse anyways
             task.getSearchProgressActionListener().onFetchFailure(i,
-                new SearchShardTarget("0", new ShardId("0", "0", 1), null, OriginalIndices.NONE),
+                new SearchShardTarget("0", new ShardId("0", "0", 1), null),
                 failure);
             shardSearchFailures[i] = new ShardSearchFailure(failure);
         }
@@ -280,7 +279,7 @@ public void testFatalFailureDuringFetch() throws InterruptedException {
         for (int i = 0; i < numShards; i++) {
            //fetch failures are currently ignored, they come back with onFailure or onResponse anyways
             task.getSearchProgressActionListener().onFetchFailure(i,
-                new SearchShardTarget("0", new ShardId("0", "0", 1), null, OriginalIndices.NONE),
+                new SearchShardTarget("0", new ShardId("0", "0", 1), null),
                 new IOException("boum"));
         }
         assertCompletionListeners(task, totalShards, totalShards, numSkippedShards, 0, true, false);
diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle
index 89ce1a322187a..ab3bd8ecc8760 100644
--- a/x-pack/plugin/security/build.gradle
+++ b/x-pack/plugin/security/build.gradle
@@ -27,6 +27,7 @@ dependencies {
   testImplementation project(path: xpackModule('sql:sql-action'))
   testImplementation project(path: ':modules:analysis-common')
   testImplementation project(path: ':modules:reindex')
+  testImplementation project(path: xpackModule('data-streams'))
   testImplementation project(":client:rest-high-level")
   testImplementation(testArtifact(project(xpackModule('core'))))
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java
index 4227bc2ee1699..7037fee9f542a 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/IndicesPermissionsWithAliasesWildcardsAndRegexsTests.java
@@ -7,16 +7,36 @@
 package org.elasticsearch.integration;
 import org.elasticsearch.action.admin.indices.alias.Alias;
+import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.elasticsearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction;
+import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction;
 import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
+import org.elasticsearch.cluster.metadata.Template;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.SecuritySettingsSourceField;
 import org.elasticsearch.xpack.core.XPackSettings;
 import org.elasticsearch.test.SecurityIntegTestCase;
+import org.elasticsearch.xpack.core.action.CreateDataStreamAction;
+import org.elasticsearch.xpack.core.action.DeleteDataStreamAction;
+import org.elasticsearch.xpack.datastreams.DataStreamsPlugin;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD;
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER;
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@@ -67,7 +87,14 @@ public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
             .build();
     }
-    public void testResolveWildcardsRegexs() throws Exception {
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        List<Class<? extends Plugin>> lst = new ArrayList<>(super.nodePlugins());
+        lst.add(DataStreamsPlugin.class);
+        return lst;
+    }
+
+    public void testGetResolveWildcardsRegexs() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("test")
                 .addMapping("type1", "field1", "type=text", "field2", "type=text")
                 .addAlias(new Alias("my_alias"))
@@ -99,4 +126,173 @@ public void testResolveWildcardsRegexs() throws Exception {
         assertThat((String) getResponse.getSource().get("field3"), equalTo("value3"));
     }
+    public void testSearchResolveWildcardsRegexs() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("test")
+                .addMapping("_doc", "field1", "type=text", "field2", "type=text")
+                .addAlias(new Alias("my_alias"))
+                .addAlias(new Alias("an_alias"))
+        );
+        client().prepareIndex("test", "_doc")
+            .setId("1")
+            .setSource("field1", "value1", "field2", "value2", "field3", "value3")
+            .setRefreshPolicy(IMMEDIATE)
+            .get();
+
+        SearchResponse response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("test")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        Map<String, Object> source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(1));
+        assertThat((String) source.get("field1"), equalTo("value1"));
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("my_alias")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(1));
+        assertThat((String) source.get("field2"), equalTo("value2"));
+
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("an_alias")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(1));
+        assertThat((String) source.get("field3"), equalTo("value3"));
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("*_alias")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(2));
+        assertThat((String) source.get("field2"), equalTo("value2"));
+        assertThat((String) source.get("field3"), equalTo("value3"));
+
+        response = client()
+            .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+            .prepareSearch("*_alias", "t*")
+            .setQuery(QueryBuilders.termQuery("_id", "1"))
+            .get();
+        assertThat(response.getHits().getHits().length, equalTo(1));
+        source = response.getHits().getHits()[0].getSourceAsMap();
+        assertThat(source.size(), equalTo(3));
+        assertThat((String) source.get("field1"), equalTo("value1"));
+        assertThat((String) source.get("field2"), equalTo("value2"));
+        assertThat((String) source.get("field3"), equalTo("value3"));
+    }
+
+    public void testSearchResolveDataStreams() throws Exception {
+        putComposableIndexTemplate("id1", Arrays.asList("test*"));
+        CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request("test");
+        client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).get();
+
+        IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest();
+        aliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD)
+            .aliases("my_alias", "an_alias")
+            .index("test"));
+        assertAcked(client().admin().indices().aliases(aliasesRequest).actionGet());
+
+        try {
+            String value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
+            client().prepareIndex("test", "_doc")
+                .setCreate(true)
+                .setId("1")
+                .setSource(DEFAULT_TIMESTAMP_FIELD, value, "field1", "value1", "field2", "value2", "field3", "value3")
+                .setRefreshPolicy(IMMEDIATE)
+                .get();
+
+            SearchResponse response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("test")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            Map<String, Object> source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(1));
+            assertThat((String) source.get("field1"), equalTo("value1"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("my_alias")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(1));
+            assertThat((String) source.get("field2"), equalTo("value2"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("an_alias")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(1));
+            assertThat((String) source.get("field3"), equalTo("value3"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("*_alias")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(2));
+            assertThat((String) source.get("field2"), equalTo("value2"));
+            assertThat((String) source.get("field3"), equalTo("value3"));
+
+            response = client()
+                .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
+                .prepareSearch("*_alias", "t*")
+                .setQuery(QueryBuilders.termQuery("_id", "1"))
+                .get();
+            assertThat(response.getHits().getHits().length, equalTo(1));
+            source = response.getHits().getHits()[0].getSourceAsMap();
+            assertThat(source.size(), equalTo(3));
+            assertThat((String) source.get("field1"), equalTo("value1"));
+            assertThat((String) source.get("field2"), equalTo("value2"));
+            assertThat((String) source.get("field3"), equalTo("value3"));
+        } finally {
+            AcknowledgedResponse response = client().execute(
+                DeleteDataStreamAction.INSTANCE,
+                new DeleteDataStreamAction.Request(new String[]{"*"})
+            ).actionGet();
+            assertAcked(response);
+
+            DeleteDataStreamAction.Request deleteDSRequest = new DeleteDataStreamAction.Request(new String[]{"*"});
+            client().execute(DeleteDataStreamAction.INSTANCE, deleteDSRequest).actionGet();
+            DeleteComposableIndexTemplateAction.Request deleteTemplateRequest = new DeleteComposableIndexTemplateAction.Request("*");
+            client().execute(DeleteComposableIndexTemplateAction.INSTANCE, deleteTemplateRequest).actionGet();
+        }
+    }
+
+    private void putComposableIndexTemplate(String id, List<String> patterns) throws IOException {
+        PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id);
+        request.indexTemplate(
+            new ComposableIndexTemplate(
+                patterns,
+                new Template(null, null, null),
+                null,
+                null,
+                null,
+                null,
+                new ComposableIndexTemplate.DataStreamTemplate(),
+                null
+            )
+        );
+        client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet();
+    }
 }
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java
index f82e45f3c7e50..365cc16d076ee 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java
@@ -35,7 +35,7 @@ public void testFromSearchHit() {
         String index = randomAlphaOfLength(10);
         searchHit.setSeqNo(seqNo);
         searchHit.setPrimaryTerm(primaryTerm);
-        searchHit.shard(new SearchShardTarget("anynode", new ShardId(index, randomAlphaOfLength(10), 1), null, null));
+        searchHit.shard(new SearchShardTarget("anynode", new ShardId(index, randomAlphaOfLength(10), 1), null));
         assertThat(SeqNoPrimaryTermAndIndex.fromSearchHit(searchHit), equalTo(new SeqNoPrimaryTermAndIndex(seqNo, primaryTerm, index)));
     }
diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java
index 330c1fa0e0df8..a26ee58e24d26 100644
--- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java
+++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java
@@ -7,7 +7,6 @@
 package org.elasticsearch.xpack.watcher.condition;
 import org.apache.lucene.search.TotalHits;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.common.text.Text;
@@ -79,7 +78,7 @@ public void testExecuteAccessHits() throws Exception {
             Clock.systemUTC());
         SearchHit hit = new SearchHit(0, "1", new Text("type"), null, null);
         hit.score(1f);
-        hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null, OriginalIndices.NONE));
+        hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null));
         InternalSearchResponse internalSearchResponse = new InternalSearchResponse(
             new SearchHits(new SearchHit[]{hit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f),
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
index ca40b85e0ae32..ba5277c61503e 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java
@@ -9,7 +9,6 @@ import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
@@ -182,7 +181,7 @@ void stopExecutor() {
             String id = String.valueOf(i);
             SearchHit hit = new SearchHit(1, id, new Text("watch"), Collections.emptyMap(), Collections.emptyMap());
             hit.version(1L);
-            hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever", OriginalIndices.NONE));
+            hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever"));
             hits[i] = hit;
             boolean active = randomBoolean();
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java
index 0da891126bb24..60d54f55fadd9 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java
@@ -11,7 +11,6 @@ import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.bulk.BulkAction;
@@ -220,7 +219,7 @@ public void testFindTriggeredWatchesGoodCase() {
         BytesArray source = new BytesArray("{}");
         SearchHit hit = new SearchHit(0, "first_foo", new Text(SINGLE_MAPPING_NAME), null, null);
         hit.version(1L);
-        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
+        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null));
         hit.sourceRef(source);
         SearchHits hits = new SearchHits(new SearchHit[]{hit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f);
         when(searchResponse1.getHits()).thenReturn(hits);
@@ -235,7 +234,7 @@ public void testFindTriggeredWatchesGoodCase() {
         // First return a scroll response with a single hit and then with no hits
         hit = new SearchHit(0, "second_foo", new Text(SINGLE_MAPPING_NAME), null, null);
         hit.version(1L);
-        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE));
+        hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null));
         hit.sourceRef(source);
         hits = new SearchHits(new SearchHit[]{hit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f);
         SearchResponse searchResponse2 = new SearchResponse(