From 8ef3af2224a4f640f60395a8f5a8eae28ff9f17f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 30 Aug 2021 09:53:28 -0400 Subject: [PATCH] Profile the fetch phase This adds profiling to the fetch phase so we can tell when fetching is slower than we'd like and we can tell which portion of the fetch is slow. The output includes which stored fields were loaded, how long it took to load stored fields, which fetch sub-phases were run, and how long those fetch sub-phases took. Closes #75892 --- .../search/TransportNoopSearchAction.java | 4 +- .../documentation/SearchDocumentationIT.java | 12 +- docs/reference/search/profile.asciidoc | 61 ++++-- .../PercolatorHighlightSubFetchPhase.java | 10 + .../PercolatorMatchedSlotSubFetchPhase.java | 10 + .../test/search/330_fetch_fields.yml | 50 +++++ .../action/search/TransportSearchIT.java | 32 +++- .../search/fetch/FetchSubPhasePluginIT.java | 10 + .../aggregation/AggregationProfilerIT.java | 32 ++-- .../search/profile/query/QueryProfilerIT.java | 24 +-- .../action/search/SearchPhaseController.java | 52 ++++-- .../action/search/SearchResponse.java | 18 +- .../action/search/SearchResponseMerger.java | 8 +- .../action/search/SearchResponseSections.java | 19 +- .../action/search/TransportSearchAction.java | 15 +- .../metrics/TopHitsAggregator.java | 23 +++ .../search/fetch/FetchPhase.java | 164 +++++++++++++---- .../search/fetch/FetchProfiler.java | 173 +++++++++++++++++ .../search/fetch/FetchSearchResult.java | 34 +++- .../search/fetch/FetchSubPhase.java | 10 + .../search/fetch/FetchSubPhaseProcessor.java | 13 ++ .../search/fetch/subphase/ExplainPhase.java | 9 + .../fetch/subphase/FetchDocValuesPhase.java | 9 + .../fetch/subphase/FetchFieldsPhase.java | 9 + .../fetch/subphase/FetchScorePhase.java | 9 + .../fetch/subphase/FetchSourcePhase.java | 86 +++++---- .../fetch/subphase/FetchVersionPhase.java | 9 + .../search/fetch/subphase/InnerHitsPhase.java | 12 +- .../fetch/subphase/MatchedQueriesPhase.java | 10 +- .../fetch/subphase/ScriptFieldsPhase.java | 11 +- .../fetch/subphase/SeqNoPrimaryTermPhase.java | 9 + .../subphase/highlight/HighlightPhase.java | 10 + .../internal/InternalSearchResponse.java | 21 ++- .../search/profile/ProfileResult.java | 13 +- .../search/profile/Profilers.java | 25 ++- ...ava => SearchProfileQueryPhaseResult.java} | 9 +- .../search/profile/SearchProfileResults.java | 160 ++++++++++++++++ .../profile/SearchProfileShardResult.java | 56 ++++++ .../profile/SearchProfileShardResults.java | 136 ++++---------- .../search/query/QueryPhase.java | 4 +- .../search/query/QuerySearchResult.java | 12 +- .../action/search/FetchSearchPhaseTests.java | 174 ++++++++++++++---- .../search/SearchPhaseControllerTests.java | 3 +- .../search/SearchResponseMergerTests.java | 12 +- .../action/search/SearchResponseTests.java | 17 +- ...ts.java => SearchProfileResultsTests.java} | 24 ++- .../search/AsyncSearchSingleNodeTests.java | 28 ++- .../DocumentLevelSecurityTests.java | 10 +- .../ClientTransformIndexerTests.java | 4 +- .../TransformIndexerFailureHandlingTests.java | 12 +- .../TransformIndexerStateTests.java | 4 +- .../transforms/TransformIndexerTests.java | 4 +- .../vectortile/rest/RestVectorTileAction.java | 4 +- 53 files changed, 1295 insertions(+), 394 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java rename server/src/main/java/org/elasticsearch/search/profile/{ProfileShardResult.java => SearchProfileQueryPhaseResult.java} (87%) create mode 100644 
server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java create mode 100644 server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java rename server/src/test/java/org/elasticsearch/search/profile/{SearchProfileShardResultsTests.java => SearchProfileResultsTests.java} (83%) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java index 3dea36958dfa0..c6579e70f645e 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -40,7 +40,7 @@ protected void doExecute(Task task, SearchRequest request, ActionListener profilingResults = + Map profilingResults = searchResponse.getProfileResults(); // <1> - for (Map.Entry profilingResult : profilingResults.entrySet()) { // <2> + for (Map.Entry profilingResult : profilingResults.entrySet()) { // <2> String key = profilingResult.getKey(); // <3> - ProfileShardResult profileShardResult = profilingResult.getValue(); // <4> + SearchProfileShardResult profileShardResult = profilingResult.getValue(); // <4> } // end::search-request-profiling-get - ProfileShardResult profileShardResult = profilingResults.values().iterator().next(); + SearchProfileShardResult profileShardResult = profilingResults.values().iterator().next(); assertNotNull(profileShardResult); // tag::search-request-profiling-queries diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 2bb7640c9288e..7c58d6239ab4b 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -163,7 +163,38 @@ The API returns the following result: ] } ], - "aggregations": [] + "aggregations": [], + "fetch": { + "type": "fetch", + "description": "fetch", + "time_in_nanos": 660555, + "breakdown": { + "next_reader": 7292, + "next_reader_count": 1, + "load_stored_fields": 299325, + "load_stored_fields_count": 5 + }, + "debug": { + "stored_fields": ["_id", "_routing", "_source"] + }, + "children": [ + { + "type": "source", + "description": "load _source", + "time_in_nanos": 20443, + "breakdown": { + "next_reader": 745, + "next_reader_count": 1, + "process": 19698, + "process_count": 5 + }, + "debug": { + "loaded_nested": 0, + "fast_path": 5 + } + } + ] + } } ] } @@ -196,7 +227,8 @@ The overall structure of the profile response is as follows: "collector": [...] <4> } ], - "aggregations": [...] 
<5> + "aggregations": [...], <5> + "fetch": {...} <6> } ] } @@ -208,15 +240,14 @@ The overall structure of the profile response is as follows: // TESTRESPONSE[s/"query": \[...\]/"query": $body.$_path/] // TESTRESPONSE[s/"collector": \[...\]/"collector": $body.$_path/] // TESTRESPONSE[s/"aggregations": \[...\]/"aggregations": []/] +// TESTRESPONSE[s/"fetch": \{...\}/"fetch": $body.$_path/] <1> A profile is returned for each shard that participated in the response, and is identified by a unique ID. -<2> Each profile contains a section which holds details about the query -execution. -<3> Each profile has a single time representing the cumulative rewrite time. -<4> Each profile also contains a section about the Lucene Collectors which run -the search. -<5> Each profile contains a section which holds the details about the -aggregation execution. +<2> Query timings and other debugging information. +<3> The cumulative rewrite time. +<4> Names and invocation timings for each collector. +<5> Aggregation timings, invocation counts, and debug information. +<6> Fetch timing and debug information. Because a search request may be executed against one or more shards in an index, and a search may cover one or more indices, the top level element in the profile @@ -295,7 +326,7 @@ Using our previous `match` query example, let's analyze the `query` section: ] -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n/] -// TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] +// TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": [], "fetch": $body.$_path}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"breakdown": \{...\}/"breakdown": $body.$_path/] <1> The breakdown timings are omitted for simplicity. @@ -347,7 +378,7 @@ Lucene execution: } -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": [{\n"type": "BooleanQuery",\n"description": "message:get message:search",\n"time_in_nanos": $body.$_path,/] -// TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] +// TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": [], "fetch": $body.$_path}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] Timings are listed in wall-clock nanoseconds and are not normalized at all. 
All @@ -448,7 +479,7 @@ Looking at the previous example: ] -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": $body.$_path,\n"rewrite_time": $body.$_path,/] -// TESTRESPONSE[s/]$/]}], "aggregations": []}]}}/] +// TESTRESPONSE[s/]$/]}], "aggregations": [], "fetch": $body.$_path}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] @@ -569,7 +600,7 @@ GET /my-index-000001/_search } -------------------------------------------------- // TEST[setup:my_index] -// TEST[s/_search/_search\?filter_path=profile.shards.id,profile.shards.searches,profile.shards.aggregations/] +// TEST[s/_search/_search\?filter_path=profile.shards.id,profile.shards.searches,profile.shards.aggregations,profile.shards.fetch/] This example has: @@ -673,13 +704,15 @@ The API returns the following result: ] } ], - "aggregations": [...] <1> + "aggregations": [...], <1> + "fetch": {...} } ] } } -------------------------------------------------- // TESTRESPONSE[s/"aggregations": \[\.\.\.\]/"aggregations": $body.$_path/] +// TESTRESPONSE[s/"fetch": \{\.\.\.\}/"fetch": $body.$_path/] // TESTRESPONSE[s/\.\.\.//] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[my-index-000001\]\[0\]"/"id": $body.profile.shards.0.id/] diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 21081e86a16a1..830bee1f35588 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -40,6 +40,16 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { this.highlightPhase = new HighlightPhase(highlighters); } + @Override + public String name() { + return "percolator_highlight"; + } + + @Override + public String description() { + return "annotates text matching percolator queries"; + } + @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { if (fetchContext.highlight() == null) { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index aef0185f9ffda..0af4bf312f959 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -44,6 +44,16 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase { static final String FIELD_NAME_PREFIX = "_percolator_document_slot"; + @Override + public String name() { + return "percolator_matched"; + } + + @Override + public String description() { + return "indicates which documents match a percolator query"; + } + @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) throws IOException { diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index 70f40c413adf0..56f1dcbf3e0ad 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -892,3 +892,53 @@ Test token_count inside nested field doesn't fail: body: _source: false fields: [ "*" ] + + +--- +profile: + - do: + indices.create: + index: test + body: + mappings: + properties: + keyword: + type: keyword + integer_range: + type: integer_range + + - do: + index: + index: test + id: 1 + refresh: true + body: + keyword: [ "a", "b" ] + integer_range: + gte: 0 + lte: 42 + + - do: + search: + index: test + body: + _source: false + profile: true + fields: [keyword, integer_range] + + - is_true: hits.hits.0._id + - match: { hits.hits.0.fields.keyword.0: a } + - match: { hits.hits.0.fields.keyword.1: b } + - match: { hits.hits.0.fields.integer_range.0.gte: 0 } + - match: { hits.hits.0.fields.integer_range.0.lte: 42 } + - gt: { profile.shards.0.fetch.time_in_nanos: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - match: { profile.shards.0.fetch.children.0.type: fields } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process: 0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index 2cde1c46a1bff..6dccf4b3a7eec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -24,10 +24,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.shard.IndexShard; @@ -53,6 +53,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.tasks.TaskId; @@ -81,19 +82,30 @@ public List<AggregationSpec> getAggregations() { @Override public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) { - /** - * Set up a fetch sub phase that throws an exception on indices whose name that start with "boom".
- */ - return Collections.singletonList(fetchContext -> new FetchSubPhaseProcessor() { + return Collections.singletonList(new FetchSubPhase() { @Override - public void setNextReader(LeafReaderContext readerContext) { + public String name() { + return "boom"; } @Override - public void process(FetchSubPhase.HitContext hitContext) { - if (fetchContext.getIndexName().startsWith("boom")) { - throw new RuntimeException("boom"); - } + public String description() { + return "throws an exception on indices whose name starts with 'boom'"; + } + + @Override + public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) throws IOException { + return new FetchSubPhaseProcessor() { + @Override + public void setNextReader(LeafReaderContext readerContext) {} + + @Override + public void process(FetchSubPhase.HitContext hitContext) { + if (fetchContext.getIndexName().startsWith("boom")) { + throw new RuntimeException("boom"); + } + } + }; } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index e93875d6e1748..ea75743a6e1cb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -98,6 +98,16 @@ public List<SearchExtSpec<?>> getSearchExts() { private static final class TermVectorsFetchSubPhase implements FetchSubPhase { private static final String NAME = "term_vectors_fetch"; + @Override + public String name() { + return NAME; + } + + @Override + public String description() { + return "test"; + } + @Override public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) { return new FetchSubPhaseProcessor() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index ddd66e0dc4ef1..0937832f0bea5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.profile.ProfileResult; -import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.Instant; @@ -119,10 +119,10 @@ public void testSimpleProfile() { SearchResponse response = client().prepareSearch("idx").setProfile(true) .addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)).get(); assertSearchResponse(response); - Map<String, ProfileShardResult> profileResults = response.getProfileResults(); + Map<String, SearchProfileShardResult> profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
assertThat(aggProfileResults, notNullValue()); @@ -164,10 +164,10 @@ public void testMultiLevelProfile() { ) ).get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -247,10 +247,10 @@ public void testMultiLevelProfileBreadthFirst() { .collectMode(SubAggCollectionMode.BREADTH_FIRST).field(TAG_FIELD).subAggregation(avg("avg").field(NUMBER_FIELD)))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -317,10 +317,10 @@ public void testDiversifiedAggProfile() { .subAggregation(max("max").field(NUMBER_FIELD))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -377,10 +377,10 @@ public void testComplexProfile() { .subAggregation(max("max").field(NUMBER_FIELD))))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -581,7 +581,7 @@ public void testNoProfile() { .subAggregation(max("max").field(NUMBER_FIELD))))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(0)); } @@ -611,10 +611,10 @@ public void testFilterByFilter() throws InterruptedException, IOException { .subAggregation(new MaxAggregationBuilder("m").field("date"))) .get(); assertSearchResponse(response); - Map profileResults = 
response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("dateidx").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -698,10 +698,10 @@ public void testDateHistogramFilterByFilterDisabled() throws InterruptedExceptio .addAggregation(new DateHistogramAggregationBuilder("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("date_filter_by_filter_disabled").numPrimaries)); - for (ProfileShardResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 3c760b340b5ce..5392aa8f43f35 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.profile.ProfileResult; -import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -77,7 +77,7 @@ public void testProfileQuery() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shard : resp.getProfileResults().entrySet()) { + for (Map.Entry shard : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -210,11 +210,11 @@ public void testSimpleMatch() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .get(); - Map p = resp.getProfileResults(); + Map p = resp.getProfileResults(); assertNotNull(p); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertEquals(result.getQueryName(), "TermQuery"); @@ -257,11 +257,11 @@ public void 
testBool() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .get(); - Map p = resp.getProfileResults(); + Map p = resp.getProfileResults(); assertNotNull(p); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertEquals(result.getQueryName(), "BooleanQuery"); @@ -329,7 +329,7 @@ public void testEmptyBool() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -381,7 +381,7 @@ public void testCollapsingBool() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -428,7 +428,7 @@ public void testBoosting() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -475,7 +475,7 @@ public void testDisMaxRange() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -521,7 +521,7 @@ public void testRange() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : 
searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -575,7 +575,7 @@ public void testPhrase() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 31bbff1b22634..9b2c3a3799d1c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -40,7 +40,8 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.suggest.Suggest; @@ -290,7 +291,7 @@ public InternalSearchResponse merge(boolean ignoreFrom, ReducedQueryPhase reduce assert currentOffset == sortedDocs.length : "expected no more score doc slices"; } } - return reducedQueryPhase.buildResponse(hits); + return reducedQueryPhase.buildResponse(hits, fetchResults); } private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, @@ -419,7 +420,7 @@ ReducedQueryPhase reducedQueryPhase(Collection quer // count the total (we use the query result provider here, since we might not get any hits (we scrolled past them)) final Map>> groupedSuggestions = hasSuggest ? new HashMap<>() : Collections.emptyMap(); - final Map profileResults = hasProfileResults ? new HashMap<>(queryResults.size()) + final Map profileShardResults = hasProfileResults ? new HashMap<>(queryResults.size()) : Collections.emptyMap(); int from = 0; int size = 0; @@ -449,7 +450,7 @@ ReducedQueryPhase reducedQueryPhase(Collection quer } if (hasProfileResults) { String key = result.getSearchShardTarget().toString(); - profileResults.put(key, result.consumeProfileResult()); + profileShardResults.put(key, result.consumeProfileResult()); } } final Suggest reducedSuggest; @@ -462,11 +463,13 @@ ReducedQueryPhase reducedQueryPhase(Collection quer reducedCompletionSuggestions = reducedSuggest.filter(CompletionSuggestion.class); } final InternalAggregations aggregations = reduceAggs(aggReduceContextBuilder, performFinalReduce, bufferedAggs); - final SearchProfileShardResults shardResults = profileResults.isEmpty() ? null : new SearchProfileShardResults(profileResults); + final SearchProfileShardResults profileResults = profileShardResults.isEmpty() + ? 
null + : new SearchProfileShardResults(profileShardResults); final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, bufferedTopDocs, from, size, reducedCompletionSuggestions); final TotalHits totalHits = topDocsStats.getTotalHits(); return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.getMaxScore(), - topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, shardResults, sortedTopDocs, + topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, profileResults, sortedTopDocs, sortValueFormats, numReducePhases, size, from, false); } @@ -535,7 +538,7 @@ public static final class ReducedQueryPhase { // the reduced internal aggregations final InternalAggregations aggregations; // the reduced profile results - final SearchProfileShardResults shardResults; + final SearchProfileShardResults searchPhaseProfileResults; // the number of reduces phases final int numReducePhases; //encloses info about the merged top docs, the sort fields used to sort the score docs etc. @@ -549,9 +552,22 @@ public static final class ReducedQueryPhase { // sort value formats used to sort / format the result final DocValueFormat[] sortValueFormats; - ReducedQueryPhase(TotalHits totalHits, long fetchHits, float maxScore, boolean timedOut, Boolean terminatedEarly, Suggest suggest, - InternalAggregations aggregations, SearchProfileShardResults shardResults, SortedTopDocs sortedTopDocs, - DocValueFormat[] sortValueFormats, int numReducePhases, int size, int from, boolean isEmptyResult) { + ReducedQueryPhase( + TotalHits totalHits, + long fetchHits, + float maxScore, + boolean timedOut, + Boolean terminatedEarly, + Suggest suggest, + InternalAggregations aggregations, + SearchProfileShardResults searchPhaseProfileResults, + SortedTopDocs sortedTopDocs, + DocValueFormat[] sortValueFormats, + int numReducePhases, + int size, + int from, + boolean isEmptyResult + ) { if (numReducePhases <= 0) { throw new IllegalArgumentException("at least one reduce phase must have been applied but was: " + numReducePhases); } @@ -562,7 +578,7 @@ public static final class ReducedQueryPhase { this.terminatedEarly = terminatedEarly; this.suggest = suggest; this.aggregations = aggregations; - this.shardResults = shardResults; + this.searchPhaseProfileResults = searchPhaseProfileResults; this.numReducePhases = numReducePhases; this.sortedTopDocs = sortedTopDocs; this.size = size; @@ -575,8 +591,18 @@ public static final class ReducedQueryPhase { * Creates a new search response from the given merged hits. 
* @see #merge(boolean, ReducedQueryPhase, Collection, IntFunction) */ - public InternalSearchResponse buildResponse(SearchHits hits) { - return new InternalSearchResponse(hits, aggregations, suggest, shardResults, timedOut, terminatedEarly, numReducePhases); + public InternalSearchResponse buildResponse(SearchHits hits, Collection fetchResults) { + SearchProfileResults profileResults = mergeProfile(fetchResults); + return new InternalSearchResponse(hits, aggregations, suggest, profileResults, timedOut, terminatedEarly, numReducePhases); + } + + private SearchProfileResults mergeProfile(Collection fetchResults) { + if (searchPhaseProfileResults == null) { + assert fetchResults.stream() + .allMatch(r -> r.fetchResult().profileResult() == null) : "found fetch profile without search profile"; + return null; + } + return searchPhaseProfileResults.merge(fetchResults); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 660869e344364..88bb8ccea050c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -11,18 +11,18 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.search.SearchHit; @@ -30,8 +30,8 @@ import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.ProfileShardResult; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -225,7 +225,7 @@ public String pointInTimeId() { * @return The profile results or an empty map */ @Nullable - public Map getProfileResults() { + public Map getProfileResults() { return internalResponse.profile(); } @@ -280,7 +280,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE SearchHits hits = null; Aggregations aggs = null; Suggest suggest = null; - SearchProfileShardResults profile = null; + SearchProfileResults profile = null; boolean timedOut = false; Boolean terminatedEarly = null; int numReducePhases = 1; @@ -318,8 +318,8 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE aggs = Aggregations.fromXContent(parser); } 
else if (Suggest.NAME.equals(currentFieldName)) { suggest = Suggest.fromXContent(parser); - } else if (SearchProfileShardResults.PROFILE_FIELD.equals(currentFieldName)) { - profile = SearchProfileShardResults.fromXContent(parser); + } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { + profile = SearchProfileResults.fromXContent(parser); } else if (RestActions._SHARDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while ((token = parser.nextToken()) != Token.END_OBJECT) { if (token == Token.FIELD_NAME) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index 0eee2f6ebfcd9..3de3c95324571 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -27,8 +27,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.ProfileShardResult; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; @@ -111,7 +111,7 @@ SearchResponse getMergedResponse(Clusters clusters) { //the current reduce phase counts as one int numReducePhases = 1; List failures = new ArrayList<>(); - Map profileResults = new HashMap<>(); + Map profileResults = new HashMap<>(); List aggs = new ArrayList<>(); Map shards = new TreeMap<>(); List topDocsList = new ArrayList<>(searchResponses.size()); @@ -187,7 +187,7 @@ SearchResponse getMergedResponse(Clusters clusters) { Suggest suggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions)); InternalAggregations reducedAggs = InternalAggregations.topLevelReduce(aggs, aggReduceContextBuilder.forFinalReduction()); ShardSearchFailure[] shardFailures = failures.toArray(ShardSearchFailure.EMPTY_ARRAY); - SearchProfileShardResults profileShardResults = profileResults.isEmpty() ? null : new SearchProfileShardResults(profileResults); + SearchProfileResults profileShardResults = profileResults.isEmpty() ? 
null : new SearchProfileResults(profileResults); //make failures ordering consistent between ordinary search and CCS by looking at the shard they come from Arrays.sort(shardFailures, FAILURES_COMPARATOR); InternalSearchResponse response = new InternalSearchResponse(mergedSearchHits, reducedAggs, suggest, profileShardResults, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index 72774df68639c..dc955aa377921 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.profile.ProfileShardResult; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -34,13 +34,20 @@ public class SearchResponseSections implements ToXContentFragment { protected final SearchHits hits; protected final Aggregations aggregations; protected final Suggest suggest; - protected final SearchProfileShardResults profileResults; + protected final SearchProfileResults profileResults; protected final boolean timedOut; protected final Boolean terminatedEarly; protected final int numReducePhases; - public SearchResponseSections(SearchHits hits, Aggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly, - SearchProfileShardResults profileResults, int numReducePhases) { + public SearchResponseSections( + SearchHits hits, + Aggregations aggregations, + Suggest suggest, + boolean timedOut, + Boolean terminatedEarly, + SearchProfileResults profileResults, + int numReducePhases + ) { this.hits = hits; this.aggregations = aggregations; this.suggest = suggest; @@ -83,7 +90,7 @@ public final int getNumReducePhases() { * * @return Profile results */ - public final Map profile() { + public final Map profile() { if (profileResults == null) { return Collections.emptyMap(); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 861f48564d13c..77da80ad0f7e0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.inject.Inject; @@ -36,9 +35,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; 
import org.elasticsearch.index.Index; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; @@ -53,8 +53,8 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.ProfileShardResult; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -366,9 +366,10 @@ static void ccsRemoteReduce(TaskId parentTaskId, SearchRequest searchRequest, Or remoteClusterClient.search(ccsSearchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { - Map profileResults = searchResponse.getProfileResults(); - SearchProfileShardResults profile = profileResults == null || profileResults.isEmpty() - ? null : new SearchProfileShardResults(profileResults); + Map profileResults = searchResponse.getProfileResults(); + SearchProfileResults profile = profileResults == null || profileResults.isEmpty() + ? null + : new SearchProfileResults(profileResults); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchResponse.getHits(), (InternalAggregations) searchResponse.getAggregations(), searchResponse.getSuggest(), profile, searchResponse.isTimedOut(), searchResponse.isTerminatedEarly(), searchResponse.getNumReducePhases()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 741d6787f3e5e..7bb05aa122d83 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -40,11 +40,16 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.BiConsumer; class TopHitsAggregator extends MetricsAggregator { @@ -62,6 +67,7 @@ private static class Collectors { private final SubSearchContext subSearchContext; private final LongObjectPagedHashMap topDocsCollectors; + private final List fetchProfiles; TopHitsAggregator( SubSearchContext subSearchContext, @@ -73,6 +79,7 @@ private static class Collectors { super(name, context, parent, metadata); topDocsCollectors = new LongObjectPagedHashMap<>(1, context.bigArrays()); this.subSearchContext = subSearchContext; + fetchProfiles = context.profiling() ? 
new ArrayList<>() : null; } @Override @@ -183,6 +190,9 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE subSearchContext.docIdsToLoad(docIdsToLoad, docIdsToLoad.length); subSearchContext.fetchPhase().execute(subSearchContext); FetchSearchResult fetchResult = subSearchContext.fetchResult(); + if (fetchProfiles != null) { + fetchProfiles.add(fetchResult.profileResult()); + } SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); for (int i = 0; i < internalHits.length; i++) { ScoreDoc scoreDoc = topDocs.scoreDocs[i]; @@ -226,6 +236,19 @@ public InternalTopHits buildEmptyAggregation() { ); } + @Override + public void collectDebugInfo(BiConsumer add) { + super.collectDebugInfo(add); + List> debug = new ArrayList<>(); + for (ProfileResult result : fetchProfiles) { + Map resultDebug = new HashMap<>(); + resultDebug.put("time", result.getTime()); + resultDebug.put("breakdown", result.getTimeBreakdown()); + debug.add(resultDebug); + } + add.accept("fetch_profile", debug); + } + @Override protected void doClose() { Releasables.close(topDocsCollectors); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 370b1021340ff..fb5ffb5e34667 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -14,12 +14,12 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.TotalHits; import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.MappedFieldType; @@ -37,6 +37,7 @@ import org.elasticsearch.search.fetch.subphase.InnerHitsPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; +import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; @@ -78,11 +79,27 @@ public void execute(SearchContext context) { if (context.docIdsToLoadSize() == 0) { // no individual hits to process, so we shortcut - context.fetchResult().hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), - context.queryResult().getMaxScore())); + SearchHits hits = new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore()); + context.fetchResult().shardResult(hits, null); return; } + Profiler profiler = context.getProfilers() == null ? 
Profiler.NOOP : context.getProfilers().getFetchProfiler(); + profiler.start(); + SearchHits hits = null; + try { + hits = buildSearchHits(context, profiler); + } finally { + // Always stop profiling + ProfileResult profileResult = profiler.stop(); + // Only set the shardResults if building search hits was successful + if (hits != null) { + context.fetchResult().shardResult(hits, profileResult); + } + } + } + + private SearchHits buildSearchHits(SearchContext context, Profiler profiler) { DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()]; for (int index = 0; index < context.docIdsToLoadSize(); index++) { docs[index] = new DocIdToIndex(context.docIdsToLoad()[index], index); @@ -92,12 +109,13 @@ public void execute(SearchContext context) { Map> storedToRequestedFields = new HashMap<>(); FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields); + profiler.visitor(fieldsVisitor); FetchContext fetchContext = new FetchContext(context); SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; - List processors = getProcessors(context.shardTarget(), fetchContext); + List processors = getProcessors(context.shardTarget(), fetchContext, profiler); NestedDocuments nestedDocuments = context.getSearchExecutionContext().getNestedDocuments(); int currentReaderIndex = -1; @@ -113,27 +131,33 @@ public void execute(SearchContext context) { try { int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); if (currentReaderIndex != readerIndex) { - currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); - currentReaderIndex = readerIndex; - if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader - && hasSequentialDocs && docs.length >= 10) { - // All the docs to fetch are adjacent but Lucene stored fields are optimized - // for random access and don't optimize for sequential access - except for merging. - // So we do a little hack here and pretend we're going to do merges in order to - // get better sequential access. - SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader(); - fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument; - } else { - fieldReader = currentReaderContext.reader()::document; - } - for (FetchSubPhaseProcessor processor : processors) { - processor.setNextReader(currentReaderContext); + profiler.startNextReader(); + try { + currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); + currentReaderIndex = readerIndex; + if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader + && hasSequentialDocs && docs.length >= 10) { + // All the docs to fetch are adjacent but Lucene stored fields are optimized + // for random access and don't optimize for sequential access - except for merging. + // So we do a little hack here and pretend we're going to do merges in order to + // get better sequential access. 
+ SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader(); + fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument; + } else { + fieldReader = currentReaderContext.reader()::document; + } + for (FetchSubPhaseProcessor processor : processors) { + processor.setNextReader(currentReaderContext); + } + leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(currentReaderContext); + } finally { + profiler.stopNextReader(); } - leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(currentReaderContext); } assert currentReaderContext != null; HitContext hit = prepareHitContext( context, + profiler, leafNestedDocuments, fieldsVisitor, docId, @@ -148,22 +172,24 @@ public void execute(SearchContext context) { throw new FetchPhaseExecutionException(context.shardTarget(), "Error running fetch phase for doc [" + docId + "]", e); } } + for (FetchSubPhaseProcessor processor : processors) { + processor.done(); + } if (context.isCancelled()) { throw new TaskCancelledException("cancelled"); } TotalHits totalHits = context.queryResult().getTotalHits(); - context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore())); - + return new SearchHits(hits, totalHits, context.queryResult().getMaxScore()); } - List getProcessors(SearchShardTarget target, FetchContext context) { + List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) { try { List processors = new ArrayList<>(); for (FetchSubPhase fsp : fetchSubPhases) { FetchSubPhaseProcessor processor = fsp.getProcessor(context); if (processor != null) { - processors.add(processor); + processors.add(profiler.profile(fsp.name(), fsp.description(), processor)); } } return processors; @@ -233,6 +259,7 @@ private boolean sourceRequired(SearchContext context) { } private HitContext prepareHitContext(SearchContext context, + Profiler profiler, LeafNestedDocuments nestedDocuments, FieldsVisitor fieldsVisitor, int docId, @@ -241,9 +268,9 @@ private HitContext prepareHitContext(SearchContext context, CheckedBiConsumer storedFieldReader) throws IOException { if (nestedDocuments.advance(docId - subReaderContext.docBase) == null) { return prepareNonNestedHitContext( - context, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, storedFieldReader); + context, profiler, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, storedFieldReader); } else { - return prepareNestedHitContext(context, docId, nestedDocuments, storedToRequestedFields, + return prepareNestedHitContext(context, profiler, docId, nestedDocuments, storedToRequestedFields, subReaderContext, storedFieldReader); } } @@ -256,6 +283,7 @@ private HitContext prepareHitContext(SearchContext context, * allows fetch subphases that use the hit context to access the preloaded source. 
*/ private HitContext prepareNonNestedHitContext(SearchContext context, + Profiler profiler, FieldsVisitor fieldsVisitor, int docId, Map> storedToRequestedFields, @@ -267,7 +295,7 @@ private HitContext prepareNonNestedHitContext(SearchContext context, return new HitContext(hit, subReaderContext, subDocId); } else { SearchHit hit; - loadStoredFields(context.getSearchExecutionContext()::getFieldType, fieldReader, fieldsVisitor, subDocId); + loadStoredFields(context.getSearchExecutionContext()::getFieldType, profiler, fieldReader, fieldsVisitor, subDocId); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); Map metaFields = new HashMap<>(); @@ -301,6 +329,7 @@ private HitContext prepareNonNestedHitContext(SearchContext context, */ @SuppressWarnings("unchecked") private HitContext prepareNestedHitContext(SearchContext context, + Profiler profiler, int topDocId, LeafNestedDocuments nestedInfo, Map> storedToRequestedFields, @@ -328,7 +357,13 @@ private HitContext prepareNestedHitContext(SearchContext context, } } else { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(searchExecutionContext::getFieldType, storedFieldReader, rootFieldsVisitor, nestedInfo.rootDoc()); + loadStoredFields( + searchExecutionContext::getFieldType, + profiler, + storedFieldReader, + rootFieldsVisitor, + nestedInfo.rootDoc() + ); rootId = rootFieldsVisitor.id(); if (needSource) { @@ -346,7 +381,13 @@ private HitContext prepareNestedHitContext(SearchContext context, Map metaFields = emptyMap(); if (context.hasStoredFields() && context.storedFieldsContext().fieldNames().isEmpty() == false) { FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); - loadStoredFields(searchExecutionContext::getFieldType, storedFieldReader, nestedFieldsVisitor, nestedInfo.doc()); + loadStoredFields( + searchExecutionContext::getFieldType, + profiler, + storedFieldReader, + nestedFieldsVisitor, + nestedInfo.doc() + ); if (nestedFieldsVisitor.fields().isEmpty() == false) { docFields = new HashMap<>(); metaFields = new HashMap<>(); @@ -390,11 +431,17 @@ private HitContext prepareNestedHitContext(SearchContext context, } private void loadStoredFields(Function fieldTypeLookup, + Profiler profileListener, CheckedBiConsumer fieldReader, FieldsVisitor fieldVisitor, int docId) throws IOException { - fieldVisitor.reset(); - fieldReader.accept(docId, fieldVisitor); - fieldVisitor.postProcess(fieldTypeLookup); + try { + profileListener.startLoadingStoredFields(); + fieldVisitor.reset(); + fieldReader.accept(docId, fieldVisitor); + fieldVisitor.postProcess(fieldTypeLookup); + } finally { + profileListener.stopLoadingStoredFields(); + } } private static void fillDocAndMetaFields(SearchContext context, FieldsVisitor fieldsVisitor, @@ -428,4 +475,57 @@ private static void fillDocAndMetaFields(SearchContext context, FieldsVisitor fi static boolean hasSequentialDocs(DocIdToIndex[] docs) { return docs.length > 0 && docs[docs.length-1].docId - docs[0].docId == docs.length - 1; } + + interface Profiler { + void start(); + + ProfileResult stop(); + + FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor processor); + + void visitor(FieldsVisitor fieldsVisitor); + + void startLoadingStoredFields(); + + void stopLoadingStoredFields(); + + void startNextReader(); + + void stopNextReader(); + + Profiler NOOP = new Profiler() { + @Override + public void start() {} + + @Override + public ProfileResult stop() { + return null; + } 
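+            // stop() returning null simply means profiling was off; the fetch result
+            // then carries no ProfileResult at all.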
+ + @Override + public void visitor(FieldsVisitor fieldsVisitor) {} + + @Override + public FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor processor) { + return processor; + } + + @Override + public void startLoadingStoredFields() {} + + @Override + public void stopLoadingStoredFields() {} + + @Override + public void startNextReader() {} + + @Override + public void stopNextReader() {} + + @Override + public String toString() { + return "noop"; + } + }; + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java new file mode 100644 index 0000000000000..e555adb8e1d0c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.fetch; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.index.fieldvisitor.FieldsVisitor; +import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; +import org.elasticsearch.search.profile.AbstractProfileBreakdown; +import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.Timer; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.TreeSet; + +import static java.util.stream.Collectors.toList; + +public class FetchProfiler implements FetchPhase.Profiler { + private FetchProfileBreakdown current; + + private long nanoTime() { + return System.nanoTime(); + } + + @Override + public void start() { + assert current == null; + current = new FetchProfileBreakdown(nanoTime()); + } + + @Override + public ProfileResult stop() { + ProfileResult result = current.result(nanoTime()); + current = null; + return result; + } + + @Override + public void visitor(FieldsVisitor fieldsVisitor) { + current.debug.put("stored_fields", fieldsVisitor == null ? 
List.of() : new TreeSet<>(fieldsVisitor.getFieldNames())); + } + + @Override + public FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor delegate) { + FetchSubPhaseProfileBreakdown breakdown = new FetchSubPhaseProfileBreakdown(type, description); + current.subPhases.add(breakdown); + return new FetchSubPhaseProcessor() { + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + Timer timer = breakdown.getTimer(FetchSubPhaseTiming.NEXT_READER); + timer.start(); + try { + delegate.setNextReader(readerContext); + } finally { + timer.stop(); + } + } + + @Override + public void process(HitContext hitContext) throws IOException { + Timer timer = breakdown.getTimer(FetchSubPhaseTiming.PROCESS); + timer.start(); + try { + delegate.process(hitContext); + } finally { + timer.stop(); + } + } + + @Override + public void done() { + breakdown.debug = delegate.getDebugInfo(); + } + }; + } + + @Override + public void startLoadingStoredFields() { + current.getTimer(FetchPhaseTiming.LOAD_STORED_FIELDS).start(); + } + + @Override + public void stopLoadingStoredFields() { + current.getTimer(FetchPhaseTiming.LOAD_STORED_FIELDS).stop(); + } + + @Override + public void startNextReader() { + current.getTimer(FetchPhaseTiming.NEXT_READER).start(); + } + + @Override + public void stopNextReader() { + current.getTimer(FetchPhaseTiming.NEXT_READER).stop(); + } + + static class FetchProfileBreakdown extends AbstractProfileBreakdown { + private final long start; + private final Map debug = new HashMap<>(); + private final List subPhases = new ArrayList<>(); + + FetchProfileBreakdown(long start) { + super(FetchPhaseTiming.class); + this.start = start; + } + + @Override + protected Map toDebugMap() { + return Map.copyOf(debug); + } + + ProfileResult result(long stop) { + List subPhases = this.subPhases.stream() + .sorted(Comparator.comparing(b -> b.type)) + .map(FetchSubPhaseProfileBreakdown::result) + .collect(toList()); + return new ProfileResult("fetch", "fetch", toBreakdownMap(), toDebugMap(), stop - start, subPhases); + } + } + + enum FetchPhaseTiming { + NEXT_READER, + LOAD_STORED_FIELDS; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + + static class FetchSubPhaseProfileBreakdown extends AbstractProfileBreakdown { + private final String type; + private final String description; + private Map debug; + + FetchSubPhaseProfileBreakdown(String type, String description) { + super(FetchSubPhaseTiming.class); + this.type = type; + this.description = description; + } + + @Override + protected Map toDebugMap() { + return debug; + } + + ProfileResult result() { + return new ProfileResult(type, description, toBreakdownMap(), toDebugMap(), toNodeTime(), List.of()); + } + } + + enum FetchSubPhaseTiming { + NEXT_READER, + PROCESS; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 5e9f6ac6580d3..31078a7b3bf9b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.fetch; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.search.SearchHit; @@ -15,6 +16,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.query.QuerySearchResult; import java.io.IOException; @@ -25,18 +27,34 @@ public final class FetchSearchResult extends SearchPhaseResult { // client side counter private transient int counter; + private ProfileResult profileResult; + public FetchSearchResult() { } + public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) { + this.contextId = id; + setSearchShardTarget(shardTarget); + } + public FetchSearchResult(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); hits = new SearchHits(in); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + profileResult = in.readOptionalWriteable(ProfileResult::new); + } else { + profileResult = null; + } } - public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) { - this.contextId = id; - setSearchShardTarget(shardTarget); + @Override + public void writeTo(StreamOutput out) throws IOException { + contextId.writeTo(out); + hits.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeOptionalWriteable(profileResult); + } } @Override @@ -49,9 +67,11 @@ public FetchSearchResult fetchResult() { return this; } - public void hits(SearchHits hits) { + public void shardResult(SearchHits hits, ProfileResult profileResult) { assert assertNoSearchTarget(hits); this.hits = hits; + assert this.profileResult == null; + this.profileResult = profileResult; } private boolean assertNoSearchTarget(SearchHits hits) { @@ -74,9 +94,7 @@ public int counterGetAndIncrement() { return counter++; } - @Override - public void writeTo(StreamOutput out) throws IOException { - contextId.writeTo(out); - hits.writeTo(out); + public ProfileResult profileResult() { + return profileResult; } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java index 8ab13515167eb..8b3566b77ed3d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java @@ -74,6 +74,16 @@ public IndexReader topLevelReader() { } } + /** + * Name of the phase to include in profile results. + */ + String name(); + + /** + * Description of the phase to include in profile results. + */ + String description(); + /** * Returns a {@link FetchSubPhaseProcessor} for this sub phase. 
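     * Returning {@code null} keeps a sub phase out of execution entirely, and,
     * now that processors are profiled, out of the fetch profile as well: null
     * processors are never wrapped by the profiler.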
* diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java index 3482e85875a94..39bc33e0234c6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; import java.io.IOException; +import java.util.Map; /** * Executes the logic for a {@link FetchSubPhase} against a particular leaf reader and hit @@ -28,4 +29,16 @@ public interface FetchSubPhaseProcessor { */ void process(HitContext hitContext) throws IOException; + /** + * Called when profiling after processing all documents to get any extra + * debug information the phase collected. + */ + default Map getDebugInfo() { + return null; + } + + /** + * Called when all hits have been processed. + */ + default void done() {} } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index d4edd8f711801..0111f186e1671 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -20,6 +20,15 @@ * Explains the scoring calculations for the top hits. */ public final class ExplainPhase implements FetchSubPhase { + @Override + public String name() { + return "explain"; + } + + @Override + public String description() { + return "explain why the documents matched the top level query"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index 7bb04aaff7bd5..82c5fe4003ef7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -26,6 +26,15 @@ * Specifying {@code "docvalue_fields": ["field1", "field2"]} */ public final class FetchDocValuesPhase implements FetchSubPhase { + @Override + public String name() { + return "doc_values"; + } + + @Override + public String description() { + return "fetches doc_values"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java index 4761d4d2b1627..79394942485a1 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -24,6 +24,15 @@ * retrieves the field values from _source and returns them as document fields. 
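Taken together, the new FetchSubPhase naming methods and the optional FetchSubPhaseProcessor hooks are everything a sub-phase needs in order to show up in the fetch profile. A minimal sketch against those interfaces (HitCountPhase and its counter are hypothetical, and registration through SearchPlugin#getFetchSubPhases is omitted):

    import java.util.Map;

    import org.apache.lucene.index.LeafReaderContext;
    import org.elasticsearch.search.fetch.FetchContext;
    import org.elasticsearch.search.fetch.FetchSubPhase;
    import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;

    public final class HitCountPhase implements FetchSubPhase {
        @Override
        public String name() {
            return "hit_count"; // surfaces as "type" in the profile output
        }

        @Override
        public String description() {
            return "counts processed hits"; // surfaces as "description"
        }

        @Override
        public FetchSubPhaseProcessor getProcessor(FetchContext context) {
            return new FetchSubPhaseProcessor() {
                private int hitsSeen;

                @Override
                public void setNextReader(LeafReaderContext readerContext) {
                    // no per-segment state to prepare in this sketch
                }

                @Override
                public void process(HitContext hitContext) {
                    hitsSeen++;
                }

                @Override
                public Map<String, Object> getDebugInfo() {
                    return Map.of("hits_seen", hitsSeen); // lands under "debug" when profiling
                }
            };
        }
    }

When profiling is enabled, FetchProfiler wraps such a processor, times setNextReader and process separately, and collects getDebugInfo() once all hits have been processed.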
*/ public final class FetchFieldsPhase implements FetchSubPhase { + @Override + public String name() { + return "fields"; + } + + @Override + public String description() { + return "fetch and normalize fields"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java index 0f176a7a2c622..442216b9bcd98 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java @@ -21,6 +21,15 @@ import java.io.IOException; public class FetchScorePhase implements FetchSubPhase { + @Override + public String name() { + return "scores"; + } + + @Override + public String description() { + return "calculates scores when they were not used by the original query"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index d27e9d6688ac3..de767cc02b2b4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -23,6 +23,15 @@ import java.util.Map; public final class FetchSourcePhase implements FetchSubPhase { + @Override + public String name() { + return "source"; + } + + @Override + public String description() { + return "load _source"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { @@ -34,6 +43,9 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { assert fetchSourceContext.fetchSource(); return new FetchSubPhaseProcessor() { + private int fastPath; + private int loadedNested; + @Override public void setNextReader(LeafReaderContext readerContext) { @@ -50,46 +62,52 @@ public void process(HitContext hitContext) { } hitExecute(fetchSourceContext, hitContext); } - }; - } - @SuppressWarnings("unchecked") - private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitContext) { + @SuppressWarnings("unchecked") + private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitContext) { + final boolean nestedHit = hitContext.hit().getNestedIdentity() != null; + SourceLookup source = hitContext.sourceLookup(); - final boolean nestedHit = hitContext.hit().getNestedIdentity() != null; - SourceLookup source = hitContext.sourceLookup(); + // If this is a parent document and there are no source filters, then add the source as-is. + if (nestedHit == false && containsFilters(fetchSourceContext) == false) { + hitContext.hit().sourceRef(source.internalSourceRef()); + fastPath++; + return; + } - // If this is a parent document and there are no source filters, then add the source as-is. - if (nestedHit == false && containsFilters(fetchSourceContext) == false) { - hitContext.hit().sourceRef(source.internalSourceRef()); - return; - } + // Otherwise, filter the source and add it to the hit. + Object value = source.filter(fetchSourceContext); + if (nestedHit) { + loadedNested++; + value = getNestedSource((Map) value, hitContext); + } - // Otherwise, filter the source and add it to the hit. 
- Object value = source.filter(fetchSourceContext); - if (nestedHit) { - value = getNestedSource((Map) value, hitContext); - } + try { + final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length()); + BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); + XContentBuilder builder = new XContentBuilder(source.sourceContentType().xContent(), streamOutput); + if (value != null) { + builder.value(value); + } else { + // This happens if the source filtering could not find the specified path in the _source. + // Just doing `builder.value(null)` is valid, but the xcontent validation can't detect what format + // it is. In certain cases, for example response serialization, we fail if no xcontent type can be + // detected. So instead we just return an empty top level object. This is also in line with what was + // being returned in this situation in 5.x and earlier. + builder.startObject(); + builder.endObject(); + } + hitContext.hit().sourceRef(BytesReference.bytes(builder)); + } catch (IOException e) { + throw new ElasticsearchException("Error filtering source", e); + } + } - try { - final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length()); - BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); - XContentBuilder builder = new XContentBuilder(source.sourceContentType().xContent(), streamOutput); - if (value != null) { - builder.value(value); - } else { - // This happens if the source filtering could not find the specified in the _source. - // Just doing `builder.value(null)` is valid, but the xcontent validation can't detect what format - // it is. In certain cases, for example response serialization we fail if no xcontent type can't be - // detected. So instead we just return an empty top level object. Also this is in inline with what was - // being return in this situation in 5.x and earlier.
- builder.startObject(); - builder.endObject(); + @Override + public Map getDebugInfo() { + return Map.of("fast_path", fastPath, "loaded_nested", loadedNested); } - hitContext.hit().sourceRef(BytesReference.bytes(builder)); - } catch (IOException e) { - throw new ElasticsearchException("Error filtering source", e); - } + }; } private static boolean containsFilters(FetchSourceContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java index fc16e054e9f88..5adb6cf91f637 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java @@ -18,6 +18,15 @@ import java.io.IOException; public final class FetchVersionPhase implements FetchSubPhase { + @Override + public String name() { + return "version"; + } + + @Override + public String description() { + return "fetches version"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index 03182b7ff380d..dbd925a0f6bda 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -33,9 +33,19 @@ public InnerHitsPhase(FetchPhase fetchPhase) { this.fetchPhase = fetchPhase; } + @Override + public String name() { + return "inner_hits"; + } + + @Override + public String description() { + return "fetches matching nested or parent or child documents"; + } + @Override public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) { - if (searchContext.innerHits() == null) { + if (searchContext.innerHits() == null || searchContext.innerHits().getInnerHits().isEmpty()) { return null; } Map innerHits = searchContext.innerHits().getInnerHits(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 0fdccae3b8b8c..1fcca0b50cb12 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -25,6 +25,15 @@ import java.util.Map; public final class MatchedQueriesPhase implements FetchSubPhase { + @Override + public String name() { + return "matched_queries"; + } + + @Override + public String description() { + return "returns the _name of each named matching query"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { @@ -72,5 +81,4 @@ public void process(HitContext hitContext) { } }; } - } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java index 467e7e79acf7d..62445264f2edb 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java @@ -22,10 +22,19 @@ import java.util.List; public final class ScriptFieldsPhase implements FetchSubPhase { + @Override + public String name() { + return "script_fields"; + } + + 
@Override + public String description() { + return "run scripts and return their results"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { - if (context.scriptFields() == null) { + if (context.scriptFields() == null || context.scriptFields().fields().isEmpty()) { return null; } List scriptFields = context.scriptFields().fields(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java index 65c97f5887c3f..14384a5de8aec 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java @@ -18,6 +18,15 @@ import java.io.IOException; public final class SeqNoPrimaryTermPhase implements FetchSubPhase { + @Override + public String name() { + return "seq_no_primary_term"; + } + + @Override + public String description() { + return "fetch sequence number and primary term"; + } @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 379bae92cb8b4..ddc3046ea1a38 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -32,6 +32,16 @@ public HighlightPhase(Map highlighters) { this.highlighters = highlighters; } + @Override + public String name() { + return "highlight"; + } + + @Override + public String description() { + return "annotates matching text"; + } + @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.highlight() == null) { diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index 31590424c6813..b5be8039471cb 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.internal; +import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -15,10 +16,12 @@ import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; +import java.util.List; /** * {@link SearchResponseSections} subclass that can be serialized over the wire. 
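The wire-format changes here, in FetchSearchResult above, and in SearchProfileResults below all follow the same backwards-compatibility pattern: the new profile data is only put on the wire when the remote node is on 8.0.0 or later, and reading from an older stream falls back to the legacy query/aggregation-only shape. A condensed sketch of the pattern, modeled on FetchSearchResult (the FooResult class is hypothetical):

    import java.io.IOException;

    import org.elasticsearch.Version;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;
    import org.elasticsearch.common.io.stream.Writeable;
    import org.elasticsearch.search.SearchHits;
    import org.elasticsearch.search.profile.ProfileResult;

    class FooResult implements Writeable {
        private final SearchHits hits;       // understood by every version
        private final ProfileResult profile; // new in 8.0.0, may be null

        FooResult(StreamInput in) throws IOException {
            hits = new SearchHits(in);
            profile = in.getVersion().onOrAfter(Version.V_8_0_0)
                ? in.readOptionalWriteable(ProfileResult::new)
                : null; // an older peer cannot have sent it
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            hits.writeTo(out);
            if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
                out.writeOptionalWriteable(profile); // never sent to older peers
            }
        }
    }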
@@ -33,20 +36,22 @@ public static InternalSearchResponse empty(boolean withTotalHits) { } public InternalSearchResponse(SearchHits hits, InternalAggregations aggregations, Suggest suggest, - SearchProfileShardResults profileResults, boolean timedOut, Boolean terminatedEarly, + SearchProfileResults profileResults, boolean timedOut, Boolean terminatedEarly, int numReducePhases) { super(hits, aggregations, suggest, timedOut, terminatedEarly, profileResults, numReducePhases); } public InternalSearchResponse(StreamInput in) throws IOException { super( - new SearchHits(in), - in.readBoolean() ? InternalAggregations.readFrom(in) : null, - in.readBoolean() ? new Suggest(in) : null, - in.readBoolean(), - in.readOptionalBoolean(), - in.readOptionalWriteable(SearchProfileShardResults::new), - in.readVInt() + new SearchHits(in), + in.readBoolean() ? InternalAggregations.readFrom(in) : null, + in.readBoolean() ? new Suggest(in) : null, + in.readBoolean(), + in.readOptionalBoolean(), + in.getVersion().onOrAfter(Version.V_8_0_0) + ? in.readOptionalWriteable(SearchProfileResults::new) + : in.readOptionalWriteable(SearchProfileShardResults::new).merge(List.of()), + in.readVInt() ); } diff --git a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java index e783cb2be2a1c..570424022274e 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -9,15 +9,15 @@ package org.elasticsearch.search.profile; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.InstantiatingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.TimeValue; import java.io.IOException; import java.util.Collections; @@ -30,13 +30,8 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** - * This class is the internal representation of a profiled Query, corresponding - * to a single node in the query tree. It is built after the query has finished executing - * and is merely a structured representation, rather than the entity that collects the timing - * profile (see InternalProfiler for that) - *

- * Each InternalProfileResult has a List of InternalProfileResults, which will contain - * "children" queries if applicable + * The result of a profiled *thing*, like a query or an aggregation. See + * {@link AbstractProfiler} for the statistic collection framework. */ public final class ProfileResult implements Writeable, ToXContentObject { static final ParseField TYPE = new ParseField("type"); diff --git a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java index 45066c73f0e39..3c3819927526c 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java +++ b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.profile; +import org.elasticsearch.search.fetch.FetchProfiler; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.profile.aggregation.AggregationProfiler; import org.elasticsearch.search.profile.query.QueryProfiler; @@ -20,18 +21,18 @@ public final class Profilers { private final ContextIndexSearcher searcher; - private final List queryProfilers; - private final AggregationProfiler aggProfiler; + private final List queryProfilers = new ArrayList<>(); + private final AggregationProfiler aggProfiler = new AggregationProfiler(); + private final FetchProfiler fetchProfiler = new FetchProfiler(); - /** Sole constructor. This {@link Profilers} instance will initially wrap one {@link QueryProfiler}. */ public Profilers(ContextIndexSearcher searcher) { this.searcher = searcher; - this.queryProfilers = new ArrayList<>(); - this.aggProfiler = new AggregationProfiler(); addQueryProfiler(); } - /** Switch to a new profile. */ + /** + * Begin profiling a new query. + */ public QueryProfiler addQueryProfiler() { QueryProfiler profiler = new QueryProfiler(); searcher.setProfiler(profiler); @@ -39,19 +40,25 @@ public QueryProfiler addQueryProfiler() { return profiler; } - /** Get the current profiler. */ + /** + * Get the profiler for the query we are currently processing. + */ public QueryProfiler getCurrentQueryProfiler() { return queryProfilers.get(queryProfilers.size() - 1); } - /** Return the list of all created {@link QueryProfiler}s so far. */ + /** + * The list of all {@link QueryProfiler}s created so far. + */ public List getQueryProfilers() { return Collections.unmodifiableList(queryProfilers); } - /** Return the {@link AggregationProfiler}. 
*/ public AggregationProfiler getAggregationProfiler() { return aggProfiler; } + public FetchProfiler getFetchProfiler() { + return fetchProfiler; + } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java rename to server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java index 30b9232727714..9c2056c60dfd8 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/ProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java @@ -19,18 +19,21 @@ import java.util.Collections; import java.util.List; -public class ProfileShardResult implements Writeable { +public class SearchProfileQueryPhaseResult implements Writeable { private final List queryProfileResults; private final AggregationProfileShardResult aggProfileShardResult; - public ProfileShardResult(List queryProfileResults, AggregationProfileShardResult aggProfileShardResult) { + public SearchProfileQueryPhaseResult( + List queryProfileResults, + AggregationProfileShardResult aggProfileShardResult + ) { this.aggProfileShardResult = aggProfileShardResult; this.queryProfileResults = Collections.unmodifiableList(queryProfileResults); } - public ProfileShardResult(StreamInput in) throws IOException { + public SearchProfileQueryPhaseResult(StreamInput in) throws IOException { int profileSize = in.readVInt(); List queryProfileResults = new ArrayList<>(profileSize); for (int i = 0; i < profileSize; i++) { diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java new file mode 100644 index 0000000000000..eb53180b6e1d7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; +import org.elasticsearch.search.profile.query.QueryProfileShardResult; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeSet; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +/** + * Profile results for all shards.
+ */ +public final class SearchProfileResults implements Writeable, ToXContentFragment { + + private static final String SEARCHES_FIELD = "searches"; + private static final String ID_FIELD = "id"; + private static final String SHARDS_FIELD = "shards"; + public static final String PROFILE_FIELD = "profile"; + + private Map shardResults; + + public SearchProfileResults(Map shardResults) { + this.shardResults = Collections.unmodifiableMap(shardResults); + } + + public SearchProfileResults(StreamInput in) throws IOException { + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + shardResults = in.readMap(StreamInput::readString, SearchProfileShardResult::new); + } else { + // Before 8.0.0 we only send the search profile result + shardResults = in.readMap( + StreamInput::readString, + i -> new SearchProfileShardResult(new SearchProfileQueryPhaseResult(in), null) + ); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeMap(shardResults, StreamOutput::writeString, (o, r) -> r.writeTo(o)); + } else { + // Before 8.0.0 we only send the search profile result + out.writeMap(shardResults, StreamOutput::writeString, (o, r) -> r.getSearch().writeTo(o)); + } + } + + public Map getShardResults() { + return shardResults; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(PROFILE_FIELD).startArray(SHARDS_FIELD); + // shardResults is a map, but we print entries in a json array, which is ordered. + // we sort the keys of the map, so that toXContent always prints out the same array order + TreeSet sortedKeys = new TreeSet<>(shardResults.keySet()); + for (String key : sortedKeys) { + builder.startObject(); + builder.field(ID_FIELD, key); + builder.startArray(SEARCHES_FIELD); + SearchProfileShardResult profileShardResult = shardResults.get(key); + for (QueryProfileShardResult result : profileShardResult.getSearch().getQueryProfileResults()) { + result.toXContent(builder, params); + } + builder.endArray(); + profileShardResult.getSearch().getAggregationProfileResults().toXContent(builder, params); + if (profileShardResult.getFetch() != null) { + builder.field("fetch"); + profileShardResult.getFetch().toXContent(builder, params); + } + builder.endObject(); + } + builder.endArray().endObject(); + return builder; + } + + public static SearchProfileResults fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + Map profileResults = new HashMap<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_ARRAY) { + if (SHARDS_FIELD.equals(parser.currentName())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + parseProfileResultsEntry(parser, profileResults); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + } + } + return new SearchProfileResults(profileResults); + } + + private static void parseProfileResultsEntry(XContentParser parser, + Map searchProfileResults) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + List queryProfileResults = new ArrayList<>(); + AggregationProfileShardResult aggProfileShardResult = null; + ProfileResult 
fetchResult = null; + String id = null; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (ID_FIELD.equals(currentFieldName)) { + id = parser.text(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (SEARCHES_FIELD.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + queryProfileResults.add(QueryProfileShardResult.fromXContent(parser)); + } + } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { + aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + fetchResult = ProfileResult.fromXContent(parser); + } else { + parser.skipChildren(); + } + } + SearchProfileShardResult result = new SearchProfileShardResult( + new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult), + fetchResult + ); + searchProfileResults.put(id, result); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java new file mode 100644 index 0000000000000..61af413062e09 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; +import org.elasticsearch.search.profile.query.QueryProfileShardResult; + +import java.io.IOException; +import java.util.List; + +public class SearchProfileShardResult implements Writeable { + private final SearchProfileQueryPhaseResult search; + + private final ProfileResult fetch; + + public SearchProfileShardResult(SearchProfileQueryPhaseResult search, ProfileResult fetch) { + this.search = search; + this.fetch = fetch; + } + + public SearchProfileShardResult(StreamInput in) throws IOException { + search = new SearchProfileQueryPhaseResult(in); + fetch = in.readOptionalWriteable(ProfileResult::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + search.writeTo(out); + out.writeOptionalWriteable(fetch); + } + + public SearchProfileQueryPhaseResult getSearch() { + return search; + } + + public ProfileResult getFetch() { + return fetch; + } + + public List getQueryProfileResults() { + return search.getQueryProfileResults(); + } + + public AggregationProfileShardResult getAggregationProfileResults() { + return search.getAggregationProfileResults(); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java index 89dd2b3818ca1..c9138ca68549d 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java @@ -11,10 +11,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; import org.elasticsearch.search.profile.aggregation.AggregationProfiler; import org.elasticsearch.search.profile.query.QueryProfileShardResult; @@ -22,28 +20,20 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.TreeSet; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** * A container class to hold all the profile results across all shards. 
Internally * holds a map of shard ID -> Profiled results */ -public final class SearchProfileShardResults implements Writeable, ToXContentFragment { - - private static final String SEARCHES_FIELD = "searches"; - private static final String ID_FIELD = "id"; - private static final String SHARDS_FIELD = "shards"; - public static final String PROFILE_FIELD = "profile"; - - private Map shardResults; +public final class SearchProfileShardResults implements Writeable { // Rename to SearchProfileQueryPhaseResults + private Map shardResults; - public SearchProfileShardResults(Map shardResults) { + public SearchProfileShardResults(Map shardResults) { this.shardResults = Collections.unmodifiableMap(shardResults); } @@ -53,111 +43,59 @@ public SearchProfileShardResults(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { String key = in.readString(); - ProfileShardResult shardResult = new ProfileShardResult(in); + SearchProfileQueryPhaseResult shardResult = new SearchProfileQueryPhaseResult(in); shardResults.put(key, shardResult); } shardResults = Collections.unmodifiableMap(shardResults); } - public Map getShardResults() { - return this.shardResults; + /** + * Merge the profiling information from some fetch results into this + * profiling information. + */ + public SearchProfileResults merge(Collection fetchResults) { + Map mergedShardResults = new HashMap<>(shardResults.size()); + for (SearchPhaseResult r : fetchResults) { + FetchSearchResult fr = r.fetchResult(); + String key = fr.getSearchShardTarget().toString(); + SearchProfileQueryPhaseResult search = shardResults.get(key); + if (search == null) { + throw new IllegalStateException( + "Profile returned fetch information for [" + + key + + "] but didn't return search information. Search keys were " + + shardResults.keySet() + ); + } + mergedShardResults.put(key, new SearchProfileShardResult(search, fr.profileResult())); + } + for (Map.Entry e : shardResults.entrySet()) { + if (false == mergedShardResults.containsKey(e.getKey())) { + mergedShardResults.put(e.getKey(), new SearchProfileShardResult(e.getValue(), null)); + } + } + return new SearchProfileResults(mergedShardResults); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeInt(shardResults.size()); - for (Map.Entry entry : shardResults.entrySet()) { + for (Map.Entry entry : shardResults.entrySet()) { out.writeString(entry.getKey()); entry.getValue().writeTo(out); } } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(PROFILE_FIELD).startArray(SHARDS_FIELD); - // shardResults is a map, but we print entries in a json array, which is ordered. 
- // we sort the keys of the map, so that toXContent always prints out the same array order - TreeSet sortedKeys = new TreeSet<>(shardResults.keySet()); - for (String key : sortedKeys) { - builder.startObject(); - builder.field(ID_FIELD, key); - builder.startArray(SEARCHES_FIELD); - ProfileShardResult profileShardResult = shardResults.get(key); - for (QueryProfileShardResult result : profileShardResult.getQueryProfileResults()) { - result.toXContent(builder, params); - } - builder.endArray(); - profileShardResult.getAggregationProfileResults().toXContent(builder, params); - builder.endObject(); - } - builder.endArray().endObject(); - return builder; - } - - public static SearchProfileShardResults fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - Map searchProfileResults = new HashMap<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.START_ARRAY) { - if (SHARDS_FIELD.equals(parser.currentName())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - parseSearchProfileResultsEntry(parser, searchProfileResults); - } - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - parser.skipChildren(); - } - } - return new SearchProfileShardResults(searchProfileResults); - } - - private static void parseSearchProfileResultsEntry(XContentParser parser, - Map searchProfileResults) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - List queryProfileResults = new ArrayList<>(); - AggregationProfileShardResult aggProfileShardResult = null; - String id = null; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (ID_FIELD.equals(currentFieldName)) { - id = parser.text(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (SEARCHES_FIELD.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - queryProfileResults.add(QueryProfileShardResult.fromXContent(parser)); - } - } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { - aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); - } else { - parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - searchProfileResults.put(id, new ProfileShardResult(queryProfileResults, aggProfileShardResult)); - } - /** * Helper method to convert Profiler into InternalProfileShardResults, which * can be serialized to other nodes, emitted as JSON, etc. 
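With this split, profile results reach the response in two steps: the query phase still builds one SearchProfileQueryPhaseResult per shard, and once fetch results arrive, merge(...) pairs each fetch profile with its query-phase entry by shard target. A sketch of the expected call pattern on the coordinating node (the combineProfiles helper is illustrative, not part of the patch):

    // Illustrative helper: where the query-phase and fetch profiles meet.
    static SearchProfileResults combineProfiles(SearchProfileShardResults queryPhaseProfiles,
                                                Collection<? extends SearchPhaseResult> fetchResults) {
        // Keys are shard targets; a shard that never ran a fetch keeps getFetch() == null,
        // while a fetch profile with no matching query profile is an IllegalStateException.
        return queryPhaseProfiles.merge(fetchResults);
    }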
* * @param profilers * The {@link Profilers} to convert into results - * @return A {@link ProfileShardResult} representing the results for this + * @return A {@link SearchProfileQueryPhaseResult} representing the results for this * shard */ - public static ProfileShardResult buildShardResults(Profilers profilers) { + public static SearchProfileQueryPhaseResult buildShardResults(Profilers profilers) { List queryProfilers = profilers.getQueryProfilers(); AggregationProfiler aggProfiler = profilers.getAggregationProfiler(); List queryResults = new ArrayList<>(queryProfilers.size()); @@ -167,6 +105,6 @@ public static ProfileShardResult buildShardResults(Profilers profilers) { queryResults.add(result); } AggregationProfileShardResult aggResults = new AggregationProfileShardResult(aggProfiler.getTree()); - return new ProfileShardResult(queryResults, aggResults); + return new SearchProfileQueryPhaseResult(queryResults, aggResults); } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 13ed9c5837768..9ca0d0e528410 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -52,7 +52,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.profile.query.InternalProfileCollector; import org.elasticsearch.search.rescore.RescorePhase; @@ -144,7 +144,7 @@ public void execute(SearchContext searchContext) throws QueryPhaseExecutionExcep aggregationPhase.execute(searchContext); if (searchContext.getProfilers() != null) { - ProfileShardResult shardResults = SearchProfileShardResults + SearchProfileQueryPhaseResult shardResults = SearchProfileShardResults .buildShardResults(searchContext.getProfilers()); searchContext.queryResult().profileResults(shardResults); } diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index e66321135f71d..f35cb57314dba 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -24,7 +24,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; -import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -53,7 +53,7 @@ public final class QuerySearchResult extends SearchPhaseResult { private Suggest suggest; private boolean searchTimedOut; private Boolean terminatedEarly = null; - private ProfileShardResult profileShardResults; + private SearchProfileQueryPhaseResult profileShardResults; private boolean hasProfileResults; private long serviceTimeEWMA = -1; private int nodeQueueSize = -1; @@ -225,11 +225,11 @@ public DelayableWriteable aggregations() { * This allows to free up memory once the profiled result is consumed. 
* @throws IllegalStateException if the profiled result has already been consumed. */ - public ProfileShardResult consumeProfileResult() { + public SearchProfileQueryPhaseResult consumeProfileResult() { if (profileShardResults == null) { throw new IllegalStateException("profile results already consumed"); } - ProfileShardResult result = profileShardResults; + SearchProfileQueryPhaseResult result = profileShardResults; profileShardResults = null; return result; } @@ -252,7 +252,7 @@ public void consumeAll() { * Sets the finalized profiling results for this query * @param shardResults The finalized profile */ - public void profileResults(ProfileShardResult shardResults) { + public void profileResults(SearchProfileQueryPhaseResult shardResults) { this.profileShardResults = shardResults; hasProfileResults = shardResults != null; } @@ -340,7 +340,7 @@ public void readFromWithId(ShardSearchContextId id, StreamInput in) throws IOExc } searchTimedOut = in.readBoolean(); terminatedEarly = in.readOptionalBoolean(); - profileShardResults = in.readOptionalWriteable(ProfileShardResult::new); + profileShardResults = in.readOptionalWriteable(SearchProfileQueryPhaseResult::new); hasProfileResults = profileShardResults != null; serviceTimeEWMA = in.readZLong(); nodeQueueSize = in.readInt(); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index c4edbd240fad7..1e8a057df275c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -26,15 +26,24 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.transport.Transport; +import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + public class FetchSearchPhaseTests extends ESTestCase { + private static final long FETCH_PROFILE_TIME = 555; public void testShortcutQueryAndFetchOptimization() { SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder()); @@ -43,6 +52,7 @@ public void testShortcutQueryAndFetchOptimization() { new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, mockSearchPhaseContext.getRequest(), 1, exc -> {}); boolean hasHits = randomBoolean(); + boolean profiled = hasHits && randomBoolean(); final int numHits; if (hasHits) { QuerySearchResult queryResult = new QuerySearchResult(); @@ -50,9 +60,12 @@ public void testShortcutQueryAndFetchOptimization() { new ShardId("index", "index", 0), null, OriginalIndices.NONE)); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]); + addProfiling(profiled, queryResult); 
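+        // Giving the fetch result a shard target below matters when profiling: merged
+        // profile results are keyed by shard target (see SearchProfileShardResults#merge).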
queryResult.size(1); FetchSearchResult fetchResult = new FetchSearchResult(); - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); + fetchResult.setSearchShardTarget(queryResult.getSearchShardTarget()); + SearchHits hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); + fetchResult.shardResult(hits, fetchProfile(profiled)); QueryFetchSearchResult fetchSearchResult = new QueryFetchSearchResult(queryResult, fetchResult); fetchSearchResult.setShardIndex(0); results.consumeResult(fetchSearchResult, () -> {}); @@ -77,9 +90,21 @@ public void run() { if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } + assertProfiles(profiled, 1, searchResponse); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); } + private void assertProfiles(boolean profiled, int totalShards, SearchResponse searchResponse) { + if (false == profiled) { + assertThat(searchResponse.getProfileResults(), equalTo(Map.of())); + return; + } + assertThat(searchResponse.getProfileResults().values().size(), equalTo(totalShards)); + for (SearchProfileShardResult profileShardResult : searchResponse.getProfileResults().values()) { + assertThat(profileShardResult.getFetch().getTime(), equalTo(FETCH_PROFILE_TIME)); + } + } + public void testFetchTwoDocument() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder()); @@ -87,22 +112,26 @@ public void testFetchTwoDocument() { new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, mockSearchPhaseContext.getRequest(), 2, exc -> {}); int resultSetSize = randomIntBetween(2, 10); + boolean profiled = randomBoolean(); + ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); - QuerySearchResult queryResult = new QuerySearchResult(ctx1, new SearchShardTarget("node1", new ShardId("test", "na", 0), - null, OriginalIndices.NONE), null); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - queryResult = new QuerySearchResult( - ctx2, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + queryResult = new QuerySearchResult(ctx2, shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @@ -110,14 +139,16 @@ public void 
testFetchTwoDocument() { public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { FetchSearchResult fetchResult = new FetchSearchResult(); + SearchHits hits; if (request.contextId().equals(ctx2)) { - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F)); + fetchResult.setSearchShardTarget(shard2Target); + hits = new SearchHits(new SearchHit[] { new SearchHit(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F); } else { assertEquals(ctx1, request.contextId()); - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); + fetchResult.setSearchShardTarget(shard1Target); + hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); } + fetchResult.shardResult(hits, fetchProfile(profiled)); listener.onResponse(fetchResult); } }; @@ -138,6 +169,7 @@ public void run() { assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(2, searchResponse.getSuccessfulShards()); + assertProfiles(profiled, 2, searchResponse); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); } @@ -148,21 +180,25 @@ public void testFailFetchOneDoc() { new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, mockSearchPhaseContext.getRequest(), 2, exc -> {}); int resultSetSize = randomIntBetween(2, 10); + boolean profiled = randomBoolean(); + final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); - QuerySearchResult queryResult = new QuerySearchResult(ctx, - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); - queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @@ -171,13 +207,17 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe SearchActionListener listener) { if (request.contextId().getId() == 321) { FetchSearchResult fetchResult = new FetchSearchResult(); - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, - new TotalHits(1, 
TotalHits.Relation.EQUAL_TO), 2.0F)); + fetchResult.setSearchShardTarget(shard2Target); + SearchHits hits = new SearchHits( + new SearchHit[] { new SearchHit(84) }, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + 2.0F + ); + fetchResult.shardResult(hits, fetchProfile(profiled)); listener.onResponse(fetchResult); } else { listener.onFailure(new MockDirectoryWrapper.FakeIOException()); } - } }; FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, @@ -199,6 +239,18 @@ public void run() { assertEquals(1, searchResponse.getShardFailures().length); assertTrue(searchResponse.getShardFailures()[0].getCause() instanceof MockDirectoryWrapper.FakeIOException); assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); + if (profiled) { + /* + * Shard 1 failed to fetch but was still searched, so there is + * query profiling information for both shards but fetch profiling + * only for the shard whose fetch succeeded. + */ + assertThat(searchResponse.getProfileResults().values().size(), equalTo(2)); + assertThat(searchResponse.getProfileResults().get(shard1Target.toString()).getFetch(), nullValue()); + assertThat(searchResponse.getProfileResults().get(shard2Target.toString()).getFetch().getTime(), equalTo(FETCH_PROFILE_TIME)); + } else { + assertThat(searchResponse.getProfileResults(), equalTo(Map.of())); + } assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx)); } @@ -206,18 +258,22 @@ public void testFetchDocsConcurrently() throws InterruptedException { int resultSetSize = randomIntBetween(0, 100); // we use at least 2 hits otherwise this is subject to single shard optimization and we trip an assert... int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard + boolean profiled = randomBoolean(); + SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); QueryPhaseResultConsumer results = controller.newSearchPhaseResults(EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, mockSearchPhaseContext.getRequest(), numHits, exc -> {}); + SearchShardTarget[] shardTargets = new SearchShardTarget[numHits]; for (int i = 0; i < numHits; i++) { - QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null, OriginalIndices.NONE); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i), shardTargets[i], null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(i); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); } mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @@ -226,8 +282,13 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe SearchActionListener listener) { new Thread(() -> { FetchSearchResult fetchResult = new FetchSearchResult(); - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit((int) (request.contextId().getId()+1))}, - new TotalHits(1,
TotalHits.Relation.EQUAL_TO), 100F)); + fetchResult.setSearchShardTarget(shardTargets[(int) request.contextId().getId()]); + SearchHits hits = new SearchHits( + new SearchHit[] { new SearchHit((int) (request.contextId().getId() + 1)) }, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + 100F + ); + fetchResult.shardResult(hits, fetchProfile(profiled)); listener.onResponse(fetchResult); }).start(); } @@ -257,6 +318,19 @@ public void run() { } assertEquals(0, searchResponse.getFailedShards()); assertEquals(numHits, searchResponse.getSuccessfulShards()); + if (profiled) { + assertThat(searchResponse.getProfileResults().values().size(), equalTo(numHits)); + int count = 0; + for (SearchProfileShardResult profileShardResult : searchResponse.getProfileResults().values()) { + if (profileShardResult.getFetch() != null) { + count++; + assertThat(profileShardResult.getFetch().getTime(), equalTo(FETCH_PROFILE_TIME)); + } + } + assertThat(count, equalTo(Math.min(numHits, resultSetSize))); + } else { + assertThat(searchResponse.getProfileResults(), equalTo(Map.of())); + } int sizeReleasedContexts = Math.max(0, numHits - resultSetSize); // all non fetched results will be freed assertEquals(mockSearchPhaseContext.releasedSearchContexts.toString(), sizeReleasedContexts, mockSearchPhaseContext.releasedSearchContexts.size()); @@ -270,22 +344,26 @@ public void testExceptionFailsPhase() { new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, mockSearchPhaseContext.getRequest(), 2, exc -> {}); int resultSetSize = randomIntBetween(2, 10); - QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), - null, OriginalIndices.NONE), null); + boolean profiled = randomBoolean(); + + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); - queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); + AtomicInteger numFetches = new AtomicInteger(0); mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @Override @@ -295,14 +373,16 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe SearchActionListener listener) { FetchSearchResult fetchResult = new FetchSearchResult(); if (numFetches.incrementAndGet() == 1) { throw new RuntimeException("BOOM"); } + SearchHits hits; if (request.contextId().getId() == 321) { - fetchResult.hits(new SearchHits(new SearchHit[] {new
SearchHit(84)}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F)); + fetchResult.setSearchShardTarget(shard2Target); + hits = new SearchHits(new SearchHit[] { new SearchHit(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F); } else { + fetchResult.setSearchShardTarget(shard1Target); assertEquals(123, request.contextId().getId()); - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); + hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); } + fetchResult.shardResult(hits, fetchProfile(profiled)); listener.onResponse(fetchResult); } }; @@ -328,22 +408,26 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, mockSearchPhaseContext.getRequest(), 2, exc -> {}); int resultSetSize = 1; + boolean profiled = randomBoolean(); + final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); - QuerySearchResult queryResult = new QuerySearchResult(ctx1, - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); + SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE); + QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); - queryResult = new QuerySearchResult(ctx2, - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); + SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE); + queryResult = new QuerySearchResult(ctx2, shard2Target, null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); + addProfiling(profiled, queryResult); results.consumeResult(queryResult, () -> {}); mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) { @@ -352,8 +436,13 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe SearchActionListener listener) { FetchSearchResult fetchResult = new FetchSearchResult(); if (request.contextId().getId() == 321) { - fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F)); + fetchResult.setSearchShardTarget(shard2Target); + SearchHits hits = new SearchHits( + new SearchHit[] { new SearchHit(84) }, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + 2.0F + ); + fetchResult.shardResult(hits, fetchProfile(profiled)); } else { fail("requestID 123 should not be fetched but was"); } @@ -377,7 +466,22 @@ public void run() { assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(2, searchResponse.getSuccessfulShards()); + if (profiled) { + assertThat(searchResponse.getProfileResults().size(),
equalTo(2)); + assertThat(searchResponse.getProfileResults().get(shard1Target.toString()).getFetch(), nullValue()); + assertThat(searchResponse.getProfileResults().get(shard2Target.toString()).getFetch().getTime(), equalTo(FETCH_PROFILE_TIME)); + } assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx1)); } + + private void addProfiling(boolean profiled, QuerySearchResult queryResult) { + if (profiled) { + queryResult.profileResults(new SearchProfileQueryPhaseResult(List.of(), null)); + } + } + + private ProfileResult fetchProfile(boolean profiled) { + return profiled ? new ProfileResult("fetch", "fetch", Map.of(), Map.of(), FETCH_PROFILE_TIME, List.of()) : null; + } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 3482467693091..ee474e7141702 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -374,7 +374,8 @@ private static AtomicArray generateFetchResults(int nShards, } } SearchHit[] hits = searchHits.toArray(new SearchHit[0]); - fetchSearchResult.hits(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore)); + // TODO unit test for generated fetch profile + fetchSearchResult.shardResult(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore), null); fetchResults.set(shardIndex, fetchSearchResult); } return fetchResults; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index d74817fb33172..b6d9e40878f77 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -13,8 +13,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.text.Text; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.DocValueFormat; @@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.ProfileShardResult; -import org.elasticsearch.search.profile.SearchProfileShardResults; -import org.elasticsearch.search.profile.SearchProfileShardResultsTests; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileResultsTests; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.test.ESTestCase; @@ -212,9 +212,9 @@ public void testMergeProfileResults() throws InterruptedException { SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0); SearchResponseMerger merger = new SearchResponseMerger(0, 0, SearchContext.TRACK_TOTAL_HITS_ACCURATE, 
searchTimeProvider, emptyReduceContextBuilder()); - Map expectedProfile = new HashMap<>(); + Map expectedProfile = new HashMap<>(); for (int i = 0; i < numResponses; i++) { - SearchProfileShardResults profile = SearchProfileShardResultsTests.createTestItem(); + SearchProfileResults profile = SearchProfileResultsTests.createTestItem(); expectedProfile.putAll(profile.getShardResults()); SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 5467ba524feeb..ddc3466460943 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.search.aggregations.AggregationsTests; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileShardResults; -import org.elasticsearch.search.profile.SearchProfileShardResultsTests; +import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileResultsTests; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestTests; import org.elasticsearch.test.ESTestCase; @@ -104,9 +104,16 @@ private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... sha SearchHits hits = SearchHitsTests.createTestItem(true, true); InternalAggregations aggregations = aggregationsTests.createTestInstance(); Suggest suggest = SuggestTests.createTestItem(); - SearchProfileShardResults profileShardResults = SearchProfileShardResultsTests.createTestItem(); - internalSearchResponse = new InternalSearchResponse(hits, aggregations, suggest, profileShardResults, - timedOut, terminatedEarly, numReducePhases); + SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem(); + internalSearchResponse = new InternalSearchResponse( + hits, + aggregations, + suggest, + profileResults, + timedOut, + terminatedEarly, + numReducePhases + ); } else { internalSearchResponse = InternalSearchResponse.empty(); } diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java similarity index 83% rename from server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java rename to server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java index ab6159bc61e14..4aa7b5818ce2b 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java @@ -31,11 +31,11 @@ import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; -public class SearchProfileShardResultsTests extends ESTestCase { +public class SearchProfileResultsTests extends ESTestCase { - public static SearchProfileShardResults createTestItem() { + public static SearchProfileResults 
createTestItem() { int size = rarely() ? 0 : randomIntBetween(1, 2); - Map searchProfileResults = new HashMap<>(size); + Map shards = new HashMap<>(size); for (int i = 0; i < size; i++) { List queryProfileResults = new ArrayList<>(); int queryItems = rarely() ? 0 : randomIntBetween(1, 2); @@ -43,9 +43,14 @@ public static SearchProfileShardResults createTestItem() { queryProfileResults.add(QueryProfileShardResultTests.createTestItem()); } AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1); - searchProfileResults.put(randomAlphaOfLengthBetween(5, 10), new ProfileShardResult(queryProfileResults, aggProfileShardResult)); + SearchProfileQueryPhaseResult searchResult = new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult); + ProfileResult fetchResult = randomBoolean() ? null : ProfileResultTests.createTestItem(2); + shards.put( + randomAlphaOfLengthBetween(5, 10), + new SearchProfileShardResult(searchResult, fetchResult) + ); } - return new SearchProfileShardResults(searchProfileResults); + return new SearchProfileResults(shards); } public void testFromXContent() throws IOException { @@ -61,7 +66,7 @@ public void testFromXContentWithRandomFields() throws IOException { } private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { - SearchProfileShardResults shardResult = createTestItem(); + SearchProfileResults shardResult = createTestItem(); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(shardResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); @@ -76,18 +81,17 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } else { mutated = originalBytes; } - SearchProfileShardResults parsed; + SearchProfileResults parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - ensureFieldName(parser, parser.nextToken(), SearchProfileShardResults.PROFILE_FIELD); + ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = SearchProfileShardResults.fromXContent(parser); + parsed = SearchProfileResults.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); } assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); - } } diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index d7dfc3f7354d8..a43da643ffd29 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchSubPhase; import 
org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -30,6 +31,7 @@ import org.elasticsearch.xpack.core.search.action.SubmitAsyncSearchRequest; import org.hamcrest.CoreMatchers; +import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -118,16 +120,30 @@ public void testFetchFailuresOnlySomeShards() throws Exception { public static final class SubFetchPhasePlugin extends Plugin implements SearchPlugin { @Override public List getFetchSubPhases(FetchPhaseConstructionContext context) { - return Collections.singletonList(searchContext -> new FetchSubPhaseProcessor() { + return Collections.singletonList(new FetchSubPhase() { @Override - public void setNextReader(LeafReaderContext readerContext) {} + public String name() { + return "test"; + } + + @Override + public String description() { + return "test"; + } @Override - public void process(FetchSubPhase.HitContext hitContext) { - if (hitContext.hit().getId().startsWith("boom")) { - throw new RuntimeException("boom"); - } + public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) throws IOException { + return new FetchSubPhaseProcessor() { + @Override + public void setNextReader(LeafReaderContext readerContext) {} + @Override + public void process(FetchSubPhase.HitContext hitContext) { + if (hitContext.hit().getId().startsWith("boom")) { + throw new RuntimeException("boom"); + } + } + }; } }); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index e2fcdf9fe8adc..4c4f7469a164a 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -28,9 +30,9 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.FuzzyQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -47,7 +49,7 @@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.builder.PointInTimeBuilder; -import org.elasticsearch.search.profile.ProfileShardResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortMode; @@ -63,8 +65,6 @@ import 
org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.action.search.ClosePointInTimeAction; -import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.xpack.security.LocalStateSecurity; import org.elasticsearch.xpack.spatial.SpatialPlugin; import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder; @@ -1400,7 +1400,7 @@ public void testProfile() throws Exception { assertNoFailures(response); assertThat(response.getProfileResults().size(), equalTo(1)); - ProfileShardResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]); + SearchProfileShardResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]); assertThat(shardResult.getQueryProfileResults().size(), equalTo(1)); QueryProfileShardResult queryProfileShardResult = shardResult.getQueryProfileResults().get(0); assertThat(queryProfileShardResult.getQueryResults().size(), equalTo(1)); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index f7cc3aa87d059..59e8232379219 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -387,7 +387,7 @@ protected void // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index ad79e2e95dc6f..9ee2b92ceb768 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -243,7 +243,7 @@ void doGetInitialProgress(SearchRequest request, ActionListener // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new 
SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 @@ -394,7 +394,7 @@ public void testDoProcessAggNullCheck() { // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 @@ -541,7 +541,7 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 @@ -642,7 +642,7 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 @@ -741,7 +741,7 @@ public void testFailureCounterIsResetOnSuccess() throws Exception { // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index 0895aa1b43ccd..f0ae6bfd95f84 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -81,7 +81,7 @@ public class TransformIndexerStateTests extends ESTestCase { // Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index 643a6f24f253f..f54b071c72eb0 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -78,7 +78,7 @@ public class TransformIndexerTests extends ESTestCase { 
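// The rename is a drop-in replacement for these fixtures; a minimal sketch of the empty
// profiled response the transform tests simulate (the `sHits` variable name is assumed
// for illustration):
//
//     InternalSearchResponse response = new InternalSearchResponse(
//         sHits,                                             // search hits fixture
//         null,                                              // simulate completely null aggs
//         new Suggest(Collections.emptyList()),
//         new SearchProfileResults(Collections.emptyMap()),  // was SearchProfileShardResults
//         false,                                             // timedOut
//         false,                                             // terminatedEarly
//         1                                                  // numReducePhases
//     );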
// Simulate completely null aggs null, new Suggest(Collections.emptyList()), - new SearchProfileShardResults(Collections.emptyMap()), + new SearchProfileResults(Collections.emptyMap()), false, false, 1 diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java index 314df1bee9e94..a88a84588e9b8 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java @@ -45,7 +45,7 @@ import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds; import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.fetch.subphase.FieldAndFormat; -import org.elasticsearch.search.profile.SearchProfileShardResults; +import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.sort.SortBuilder; import java.io.IOException; @@ -144,7 +144,7 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio searchResponse.isTerminatedEarly(), searchResponse.getProfileResults() == null ? null - : new SearchProfileShardResults(searchResponse.getProfileResults()), + : new SearchProfileResults(searchResponse.getProfileResults()), searchResponse.getNumReducePhases() ), searchResponse.getScrollId(),