diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 7e1fea1ebde60..7b64a2f27cda6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -48,9 +48,9 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -89,7 +89,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.profile.ProfileResult; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; @@ -499,15 +499,15 @@ public void testSearchRequestProfiling() throws IOException { SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // tag::search-request-profiling-get - Map<String, SearchProfileQueryPhaseResult> profilingResults = + Map<String, SearchProfileShardResult> profilingResults = searchResponse.getProfileResults(); // <1> - for (Map.Entry<String, SearchProfileQueryPhaseResult> profilingResult : profilingResults.entrySet()) { // <2> + for (Map.Entry<String, SearchProfileShardResult> profilingResult : profilingResults.entrySet()) { // <2> String key = profilingResult.getKey(); // <3> - SearchProfileQueryPhaseResult profileShardResult = profilingResult.getValue(); // <4> + SearchProfileShardResult profileShardResult = profilingResult.getValue(); // <4> } // end::search-request-profiling-get - SearchProfileQueryPhaseResult profileShardResult = profilingResults.values().iterator().next(); + SearchProfileShardResult profileShardResult = profilingResults.values().iterator().next(); assertNotNull(profileShardResult); // tag::search-request-profiling-queries diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 2bb7640c9288e..72e91524503e6 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -17,8 +17,8 @@ The Profile API gives the user insight into how search requests are executed at a low level so that the user can understand why certain requests are slow, and take steps to improve them. Note that the Profile API, <>, doesn't measure network latency, -time spent in the search fetch phase, time spent while the requests spends in -queues or while merging shard responses on the coordinating node. +time spent while the request waits in queues, or while merging shard +responses on the coordinating node. The output from the Profile API is *very* verbose, especially for complicated requests executed across many shards.
Pretty-printing the response is @@ -163,7 +163,37 @@ The API returns the following result: ] } ], - "aggregations": [] + "aggregations": [], + "fetch": { + "type": "fetch", + "description": "", + "time_in_nanos": 660555, + "breakdown": { + "next_reader": 7292, + "next_reader_count": 1, + "load_stored_fields": 299325, + "load_stored_fields_count": 5 + }, + "debug": { + "stored_fields": ["_id", "_routing", "_source"] + }, + "children": [ + { + "type": "FetchSourcePhase", + "description": "", + "time_in_nanos": 20443, + "breakdown": { + "next_reader": 745, + "next_reader_count": 1, + "process": 19698, + "process_count": 5 + }, + "debug": { + "fast_path": 5 + } + } + ] + } } ] } @@ -196,7 +226,8 @@ The overall structure of the profile response is as follows: "collector": [...] <4> } ], - "aggregations": [...] <5> + "aggregations": [...], <5> + "fetch": {...} <6> } ] } @@ -208,15 +239,14 @@ The overall structure of the profile response is as follows: // TESTRESPONSE[s/"query": \[...\]/"query": $body.$_path/] // TESTRESPONSE[s/"collector": \[...\]/"collector": $body.$_path/] // TESTRESPONSE[s/"aggregations": \[...\]/"aggregations": []/] +// TESTRESPONSE[s/"fetch": \{...\}/"fetch": $body.$_path/] <1> A profile is returned for each shard that participated in the response, and is identified by a unique ID. -<2> Each profile contains a section which holds details about the query -execution. -<3> Each profile has a single time representing the cumulative rewrite time. -<4> Each profile also contains a section about the Lucene Collectors which run -the search. -<5> Each profile contains a section which holds the details about the -aggregation execution. +<2> Query timings and other debugging information. +<3> The cumulative rewrite time. +<4> Names and invocation timings for each collector. +<5> Aggregation timings, invocation counts, and debug information. +<6> Fetch timing and debug information. Because a search request may be executed against one or more shards in an index, and a search may cover one or more indices, the top level element in the profile @@ -295,7 +325,7 @@ Using our previous `match` query example, let's analyze the `query` section: ] -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n/] -// TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] +// TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": [], "fetch": $body.$_path}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"breakdown": \{...\}/"breakdown": $body.$_path/] <1> The breakdown timings are omitted for simplicity. 
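For readers following the Java side of this change, here is a minimal sketch of how a high-level REST client caller might consume the new per-shard `fetch` section. It uses only API surface visible elsewhere in this diff (`getProfileResults()`, `SearchProfileShardResult.getFetchPhase()`, `ProfileResult.getTime()` and `getTimeBreakdown()`); the `searchResponse` variable is assumed to hold an already-executed profiled search:

[source,java]
----
import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.search.profile.SearchProfileShardResult;

static void printFetchProfiles(SearchResponse searchResponse) {
    Map<String, SearchProfileShardResult> profilingResults = searchResponse.getProfileResults();
    for (Map.Entry<String, SearchProfileShardResult> entry : profilingResults.entrySet()) {
        // getFetchPhase() is null for shards that did not fetch any documents
        ProfileResult fetch = entry.getValue().getFetchPhase();
        if (fetch != null) {
            long totalNanos = fetch.getTime();                      // rendered as "time_in_nanos"
            Map<String, Long> breakdown = fetch.getTimeBreakdown(); // next_reader, load_stored_fields, ...
            System.out.println(entry.getKey() + ": " + totalNanos + "ns " + breakdown);
        }
    }
}
----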
@@ -347,7 +377,7 @@ Lucene execution: } -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": [{\n"type": "BooleanQuery",\n"description": "message:get message:search",\n"time_in_nanos": $body.$_path,/] -// TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] +// TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": [], "fetch": $body.$_path}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] Timings are listed in wall-clock nanoseconds and are not normalized at all. All @@ -448,7 +478,7 @@ Looking at the previous example: ] -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": $body.$_path,\n"rewrite_time": $body.$_path,/] -// TESTRESPONSE[s/]$/]}], "aggregations": []}]}}/] +// TESTRESPONSE[s/]$/]}], "aggregations": [], "fetch": $body.$_path}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] @@ -569,7 +599,7 @@ GET /my-index-000001/_search } -------------------------------------------------- // TEST[setup:my_index] -// TEST[s/_search/_search\?filter_path=profile.shards.id,profile.shards.searches,profile.shards.aggregations/] +// TEST[s/_search/_search\?filter_path=profile.shards.id,profile.shards.searches,profile.shards.aggregations,profile.shards.fetch/] This example has: @@ -673,13 +703,15 @@ The API returns the following result: ] } ], - "aggregations": [...] <1> + "aggregations": [...], <1> + "fetch": {...} } ] } } -------------------------------------------------- // TESTRESPONSE[s/"aggregations": \[\.\.\.\]/"aggregations": $body.$_path/] +// TESTRESPONSE[s/"fetch": \{\.\.\.\}/"fetch": $body.$_path/] // TESTRESPONSE[s/\.\.\.//] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[my-index-000001\]\[0\]"/"id": $body.profile.shards.0.id/] @@ -918,6 +950,99 @@ to give you a feel for A) what machinery in {es} is actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, the breakdown is inclusive of all children times. +[[profiling-fetch]] +===== Profiling Fetch + + +All shards that fetched documents will have a `fetch` section in the profile.
+Let's execute a small search and have a look at the fetch profile: + +[source,console] +---- +GET /my-index-000001/_search?filter_path=profile.shards.fetch +{ + "profile": true, + "query": { + "term": { + "user.id": { + "value": "elkbee" + } + } + } +} +---- +// TEST[continued] + +And here is the fetch profile: + +[source,console-result] +---- +{ + "profile": { + "shards": [ + { + "fetch": { + "type": "fetch", + "description": "", + "time_in_nanos": 660555, + "breakdown": { + "next_reader": 7292, + "next_reader_count": 1, + "load_stored_fields": 299325, + "load_stored_fields_count": 5 + }, + "debug": { + "stored_fields": ["_id", "_routing", "_source"] + }, + "children": [ + { + "type": "FetchSourcePhase", + "description": "", + "time_in_nanos": 20443, + "breakdown": { + "next_reader": 745, + "next_reader_count": 1, + "process": 19698, + "process_count": 5 + }, + "debug": { + "fast_path": 4 + } + } + ] + } + } + ] + } +} +---- +// TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] + +Since this is debugging information about the way that Elasticsearch executes +the fetch, it can change from request to request and version to version. Even +patch versions may change the output here. That lack of consistency is what +makes it useful for debugging. + +Anyway! `time_in_nanos` measures the total time of the fetch phase. +The `breakdown` counts and times our +per-link:{glossary}/terms.html#glossary-segment[segment] preparation in +`next_reader` and the time taken loading stored fields in `load_stored_fields`. +The `debug` section contains miscellaneous non-timing information; specifically, +`stored_fields` lists the stored fields that fetch will have to load. If it is +an empty list then fetch will entirely skip loading stored fields. + +The `children` section lists the sub-phases that do the actual fetching work +and the `breakdown` has counts and timings for the +per-link:{glossary}/terms.html#glossary-segment[segment] preparation in +`next_reader` and the per-document fetching in `process`. + +NOTE: We try hard to load all of the stored fields that we will need for the +fetch up front. This tends to make the `_source` phase take only a couple of +microseconds per hit. In that case the true cost of the `_source` phase is +hidden in the `load_stored_fields` component of the breakdown. It's possible to +entirely skip loading stored fields by setting +`"_source": false, "stored_fields": ["_none_"]`. + [[profiling-considerations]] ===== Profiling Considerations @@ -936,16 +1061,13 @@ have a drastic effect compared to other components in the profiled query. [[profile-limitations]] ===== Limitations -- Profiling currently does not measure the search fetch phase nor the network -overhead. +- Profiling currently does not measure the network overhead. - Profiling also does not account for time spent in the queue, merging shard responses on the coordinating node, or additional work such as building global ordinals (an internal data structure used to speed up search). -- Profiling statistics are currently not available for suggestions, +- Profiling statistics are currently not available for suggestions, highlighting, `dfs_query_then_fetch`. - Profiling of the reduce phase of aggregation is currently not available. -- The Profiler is still highly experimental. The Profiler is instrumenting parts -of Lucene that were never designed to be exposed in this manner, and so all -results should be viewed as a best effort to provide detailed diagnostics. We -hope to improve this over time.
If you find obviously wrong numbers, strange -query structures, or other bugs, please report them! +- The Profiler is instrumenting internals that can change from version to +version. The resulting JSON should be considered mostly unstable, especially +things in the `debug` section. diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml index f4f119f68d6d2..c6acf90e60716 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml @@ -145,3 +145,39 @@ teardown: query: match_all: {} inner_hits: {} + +--- +profile fetch: + - skip: + version: ' - 7.99.99' + reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0 + + - do: + search: + index: test + body: + profile: true + query: + has_parent: + parent_type: question + query: + match_all: {} + inner_hits: {} + + - gt: { profile.shards.0.fetch.time_in_nanos: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - length: { profile.shards.0.fetch.children: 2 } + - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process: 0 } + - match: { profile.shards.0.fetch.children.1.type: InnerHitsPhase } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.process: 0 } diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index fe0a206d787ac..75d765e39f44b 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -97,10 +97,13 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import static java.util.stream.Collectors.toList; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.not; /** * This test class executes twice, first against the remote cluster, and then against another cluster that has the remote cluster @@ -405,6 +408,10 @@ public void testProfile() throws Exception { duelSearch(searchRequest, response -> { assertHits(response); assertFalse(response.getProfileResults().isEmpty()); + assertThat( + response.getProfileResults().values().stream().filter(sr -> sr.getFetchPhase() != null).collect(toList()), + not(empty()) + ); }); } @@ -813,6 +820,14 @@ private
static Map<String, Object> responseToMap(SearchResponse response) throws List<Map<String, Object>> shards = (List<Map<String, Object>>) profile.get("shards"); for (Map<String, Object> shard : shards) { replaceProfileTime(shard); + /* + * The way we try to reduce round trips is by fetching all + * of the results we could possibly need from the remote + * cluster and then merging *those* together locally. This + * will end up fetching more documents total. So we can't + * really compare the fetch profiles here. + */ + shard.remove("fetch"); } } return responseMap; diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml new file mode 100644 index 0000000000000..7852d4245f0a7 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -0,0 +1,146 @@ +--- +setup: + - do: + indices.create: + index: test + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + keyword: + type: keyword + + - do: + index: + index: test + id: 1 + refresh: true + body: + keyword: [ "a", "b" ] + +--- +fetch fields: + - skip: + version: ' - 7.99.99' + reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0 + + - do: + search: + index: test + body: + _source: false + profile: true + fields: [keyword] + + - is_true: hits.hits.0._id + - match: { hits.hits.0.fields.keyword.0: a } + - match: { hits.hits.0.fields.keyword.1: b } + - gt: { profile.shards.0.fetch.time_in_nanos: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - length: { profile.shards.0.fetch.children: 1 } + - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process: 0 } + +--- +fetch source: + - skip: + version: ' - 7.99.99' + reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0 + + - do: + search: + index: test + body: + profile: true + + - is_true: hits.hits.0._id + - match: { hits.hits.0._source.keyword.0: a } + - match: { hits.hits.0._source.keyword.1: b } + - gt: { profile.shards.0.fetch.time_in_nanos: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - length: { profile.shards.0.fetch.children: 1 } + - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process: 0 } + - match: { profile.shards.0.fetch.children.0.debug.fast_path: 1 } + +--- +fetch nested source: + - skip: + version: ' - 7.99.99' + reason: fetch
profiling implemented in 8.0.0 to be backported to 7.16.0 + + - do: + indices.create: + index: test_nested + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + keyword: + type: keyword + nested: + type: nested + properties: + text: + type: text + + - do: + index: + index: test_nested + id: 1 + refresh: true + body: + keyword: [ "a", "b" ] + nested: + - text: the quick brown fox + - text: jumped over the + - text: lazy dog + + - do: + search: + index: test_nested + body: + profile: true + query: + nested: + path: nested + query: + match_all: {} + inner_hits: {} + + - is_true: hits.hits.0._id + - match: { hits.hits.0._source.keyword.0: a } + - match: { hits.hits.0._source.keyword.1: b } + - gt: { profile.shards.0.fetch.time_in_nanos: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 } + - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 } + - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] } + - length: { profile.shards.0.fetch.children: 2 } + - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.0.breakdown.process: 0 } + - match: { profile.shards.0.fetch.children.1.type: InnerHitsPhase } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.process_count: 0 } + - gt: { profile.shards.0.fetch.children.1.breakdown.process: 0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index 276055a51dceb..a2c2226da9831 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.profile.ProfileResult; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.Instant; @@ -119,10 +119,10 @@ public void testSimpleProfile() { SearchResponse response = client().prepareSearch("idx").setProfile(true) .addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)).get(); assertSearchResponse(response); - Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults(); + Map<String, SearchProfileShardResult> profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue());
AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -164,10 +164,10 @@ public void testMultiLevelProfile() { ) ).get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -247,10 +247,10 @@ public void testMultiLevelProfileBreadthFirst() { .collectMode(SubAggCollectionMode.BREADTH_FIRST).field(TAG_FIELD).subAggregation(avg("avg").field(NUMBER_FIELD)))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -317,10 +317,10 @@ public void testDiversifiedAggProfile() { .subAggregation(max("max").field(NUMBER_FIELD))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -380,10 +380,10 @@ public void testComplexProfile() { .subAggregation(max("max").field(NUMBER_FIELD))))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -584,7 +584,7 @@ public void testNoProfile() { .subAggregation(max("max").field(NUMBER_FIELD))))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(0)); } @@ -614,10 +614,10 @@ public void testFilterByFilter() throws 
InterruptedException, IOException { .subAggregation(new MaxAggregationBuilder("m").field("date"))) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("dateidx").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); @@ -701,10 +701,10 @@ public void testDateHistogramFilterByFilterDisabled() throws InterruptedExceptio .addAggregation(new DateHistogramAggregationBuilder("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) .get(); assertSearchResponse(response); - Map profileResults = response.getProfileResults(); + Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("date_filter_by_filter_disabled").numPrimaries)); - for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) { + for (SearchProfileShardResult profileShardResult : profileResults.values()) { assertThat(profileShardResult, notNullValue()); AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults(); assertThat(aggProfileResults, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 218b2a14b27b0..b9e2c8adc32ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.profile.ProfileResult; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -77,7 +77,7 @@ public void testProfileQuery() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shard : resp.getProfileResults().entrySet()) { + for (Map.Entry shard : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -210,11 +210,11 @@ public void testSimpleMatch() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .get(); - Map p = resp.getProfileResults(); + Map p = resp.getProfileResults(); assertNotNull(p); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : 
shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertEquals(result.getQueryName(), "TermQuery"); @@ -257,11 +257,11 @@ public void testBool() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .get(); - Map p = resp.getProfileResults(); + Map p = resp.getProfileResults(); assertNotNull(p); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertEquals(result.getQueryName(), "BooleanQuery"); @@ -329,7 +329,7 @@ public void testEmptyBool() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -381,7 +381,7 @@ public void testCollapsingBool() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -428,7 +428,7 @@ public void testBoosting() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -475,7 +475,7 @@ public void testDisMaxRange() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -521,7 +521,7 @@ public void testRange() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for 
(Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); @@ -576,7 +576,7 @@ public void testPhrase() throws Exception { assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); - for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { + for (Map.Entry shardResult : resp.getProfileResults().entrySet()) { for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) { for (ProfileResult result : searchProfiles.getQueryResults()) { assertNotNull(result.getQueryName()); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index a5f46646669a5..909549dc1de17 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -42,6 +42,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileResultsBuilder; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; @@ -290,7 +291,7 @@ public InternalSearchResponse merge(boolean ignoreFrom, ReducedQueryPhase reduce assert currentOffset == sortedDocs.length : "expected no more score doc slices"; } } - return reducedQueryPhase.buildResponse(hits); + return reducedQueryPhase.buildResponse(hits, fetchResults); } private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, @@ -401,8 +402,22 @@ ReducedQueryPhase reducedQueryPhase(Collection quer numReducePhases++; // increment for this phase if (queryResults.isEmpty()) { // early terminate we have nothing to reduce final TotalHits totalHits = topDocsStats.getTotalHits(); - return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.getMaxScore(), - false, null, null, null, null, SortedTopDocs.EMPTY, null, numReducePhases, 0, 0, true); + return new ReducedQueryPhase( + totalHits, + topDocsStats.fetchHits, + topDocsStats.getMaxScore(), + false, + null, + null, + null, + null, + SortedTopDocs.EMPTY, + null, + numReducePhases, + 0, + 0, + true + ); } int total = queryResults.size(); queryResults = queryResults.stream() @@ -419,7 +434,8 @@ ReducedQueryPhase reducedQueryPhase(Collection quer // count the total (we use the query result provider here, since we might not get any hits (we scrolled past them)) final Map>> groupedSuggestions = hasSuggest ? new HashMap<>() : Collections.emptyMap(); - final Map profileResults = hasProfileResults ? new HashMap<>(queryResults.size()) + final Map profileShardResults = hasProfileResults + ? 
new HashMap<>(queryResults.size()) : Collections.emptyMap(); int from = 0; int size = 0; @@ -449,7 +465,7 @@ ReducedQueryPhase reducedQueryPhase(Collection quer } if (hasProfileResults) { String key = result.getSearchShardTarget().toString(); - profileResults.put(key, result.consumeProfileResult()); + profileShardResults.put(key, result.consumeProfileResult()); } } final Suggest reducedSuggest; @@ -462,11 +478,13 @@ ReducedQueryPhase reducedQueryPhase(Collection quer reducedCompletionSuggestions = reducedSuggest.filter(CompletionSuggestion.class); } final InternalAggregations aggregations = reduceAggs(aggReduceContextBuilder, performFinalReduce, bufferedAggs); - final SearchProfileResults shardResults = profileResults.isEmpty() ? null : new SearchProfileResults(profileResults); + final SearchProfileResultsBuilder profileBuilder = profileShardResults.isEmpty() + ? null + : new SearchProfileResultsBuilder(profileShardResults); final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, bufferedTopDocs, from, size, reducedCompletionSuggestions); final TotalHits totalHits = topDocsStats.getTotalHits(); return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.getMaxScore(), - topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, shardResults, sortedTopDocs, + topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, profileBuilder, sortedTopDocs, sortValueFormats, numReducePhases, size, from, false); } @@ -535,7 +553,7 @@ public static final class ReducedQueryPhase { // the reduced internal aggregations final InternalAggregations aggregations; // the reduced profile results - final SearchProfileResults shardResults; + final SearchProfileResultsBuilder profileBuilder; // the number of reduces phases final int numReducePhases; //encloses info about the merged top docs, the sort fields used to sort the score docs etc. @@ -549,9 +567,22 @@ public static final class ReducedQueryPhase { // sort value formats used to sort / format the result final DocValueFormat[] sortValueFormats; - ReducedQueryPhase(TotalHits totalHits, long fetchHits, float maxScore, boolean timedOut, Boolean terminatedEarly, Suggest suggest, - InternalAggregations aggregations, SearchProfileResults shardResults, SortedTopDocs sortedTopDocs, - DocValueFormat[] sortValueFormats, int numReducePhases, int size, int from, boolean isEmptyResult) { + ReducedQueryPhase( + TotalHits totalHits, + long fetchHits, + float maxScore, + boolean timedOut, + Boolean terminatedEarly, + Suggest suggest, + InternalAggregations aggregations, + SearchProfileResultsBuilder profileBuilder, + SortedTopDocs sortedTopDocs, + DocValueFormat[] sortValueFormats, + int numReducePhases, + int size, + int from, + boolean isEmptyResult + ) { if (numReducePhases <= 0) { throw new IllegalArgumentException("at least one reduce phase must have been applied but was: " + numReducePhases); } @@ -562,7 +593,7 @@ public static final class ReducedQueryPhase { this.terminatedEarly = terminatedEarly; this.suggest = suggest; this.aggregations = aggregations; - this.shardResults = shardResults; + this.profileBuilder = profileBuilder; this.numReducePhases = numReducePhases; this.sortedTopDocs = sortedTopDocs; this.size = size; @@ -575,8 +606,28 @@ public static final class ReducedQueryPhase { * Creates a new search response from the given merged hits. 
* @see #merge(boolean, ReducedQueryPhase, Collection, IntFunction) */ - public InternalSearchResponse buildResponse(SearchHits hits) { - return new InternalSearchResponse(hits, aggregations, suggest, shardResults, timedOut, terminatedEarly, numReducePhases); + public InternalSearchResponse buildResponse(SearchHits hits, Collection fetchResults) { + return new InternalSearchResponse( + hits, + aggregations, + suggest, + buildSearchProfileResults(fetchResults), + timedOut, + terminatedEarly, + numReducePhases + ); + } + + private SearchProfileResults buildSearchProfileResults(Collection fetchResults) { + if (profileBuilder == null) { + assert fetchResults.stream() + .map(SearchPhaseResult::fetchResult) + .filter(r -> r != null) + .allMatch(r -> r.profileResult() == null) : "found fetch profile without search profile"; + return null; + + } + return profileBuilder.build(fetchResults); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index dd13e2f46a667..ea925b5a11f78 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -11,18 +11,18 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.search.SearchHit; @@ -30,8 +30,8 @@ import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -229,7 +229,7 @@ public String pointInTimeId() { * @return The profile results or an empty map */ @Nullable - public Map getProfileResults() { + public Map getProfileResults() { return internalResponse.profile(); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index ddf14e51fa64a..3de3c95324571 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -27,8 +27,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import 
org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; @@ -111,7 +111,7 @@ SearchResponse getMergedResponse(Clusters clusters) { //the current reduce phase counts as one int numReducePhases = 1; List failures = new ArrayList<>(); - Map profileResults = new HashMap<>(); + Map profileResults = new HashMap<>(); List aggs = new ArrayList<>(); Map shards = new TreeMap<>(); List topDocsList = new ArrayList<>(searchResponses.size()); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index 7c4def66326a4..dc955aa377921 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import java.io.IOException; @@ -39,8 +39,15 @@ public class SearchResponseSections implements ToXContentFragment { protected final Boolean terminatedEarly; protected final int numReducePhases; - public SearchResponseSections(SearchHits hits, Aggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly, - SearchProfileResults profileResults, int numReducePhases) { + public SearchResponseSections( + SearchHits hits, + Aggregations aggregations, + Suggest suggest, + boolean timedOut, + Boolean terminatedEarly, + SearchProfileResults profileResults, + int numReducePhases + ) { this.hits = hits; this.aggregations = aggregations; this.suggest = suggest; @@ -83,7 +90,7 @@ public final int getNumReducePhases() { * * @return Profile results */ - public final Map profile() { + public final Map profile() { if (profileResults == null) { return Collections.emptyMap(); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index eeab675ce986e..eeac76b033bd4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.inject.Inject; @@ -36,9 +35,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; @@ -53,8 +53,8 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -367,9 +367,10 @@ static void ccsRemoteReduce(TaskId parentTaskId, SearchRequest searchRequest, Or remoteClusterClient.search(ccsSearchRequest, new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { - Map profileResults = searchResponse.getProfileResults(); + Map profileResults = searchResponse.getProfileResults(); SearchProfileResults profile = profileResults == null || profileResults.isEmpty() - ? null : new SearchProfileResults(profileResults); + ? null + : new SearchProfileResults(profileResults); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchResponse.getHits(), (InternalAggregations) searchResponse.getAggregations(), searchResponse.getSuggest(), profile, searchResponse.isTimedOut(), searchResponse.isTerminatedEarly(), searchResponse.getNumReducePhases()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 741d6787f3e5e..7bb05aa122d83 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -40,11 +40,16 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.SubSearchContext; +import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.BiConsumer; class TopHitsAggregator extends MetricsAggregator { @@ -62,6 +67,7 @@ private static class Collectors { private final SubSearchContext subSearchContext; private final LongObjectPagedHashMap topDocsCollectors; + private final List fetchProfiles; TopHitsAggregator( SubSearchContext subSearchContext, @@ -73,6 +79,7 @@ private static class Collectors { super(name, context, parent, metadata); topDocsCollectors = new LongObjectPagedHashMap<>(1, context.bigArrays()); this.subSearchContext = subSearchContext; + fetchProfiles = context.profiling() ? 
new ArrayList<>() : null; } @Override @@ -183,6 +190,9 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE subSearchContext.docIdsToLoad(docIdsToLoad, docIdsToLoad.length); subSearchContext.fetchPhase().execute(subSearchContext); FetchSearchResult fetchResult = subSearchContext.fetchResult(); + if (fetchProfiles != null) { + fetchProfiles.add(fetchResult.profileResult()); + } SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); for (int i = 0; i < internalHits.length; i++) { ScoreDoc scoreDoc = topDocs.scoreDocs[i]; @@ -226,6 +236,19 @@ public InternalTopHits buildEmptyAggregation() { ); } + @Override + public void collectDebugInfo(BiConsumer add) { + super.collectDebugInfo(add); + List> debug = new ArrayList<>(); + for (ProfileResult result : fetchProfiles) { + Map resultDebug = new HashMap<>(); + resultDebug.put("time", result.getTime()); + resultDebug.put("breakdown", result.getTimeBreakdown()); + debug.add(resultDebug); + } + add.accept("fetch_profile", debug); + } + @Override protected void doClose() { Releasables.close(topDocsCollectors); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index afa4738a9ecd5..214985f22f8fb 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -14,14 +14,14 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.TotalHits; import org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.mapper.MappedFieldType; @@ -40,6 +40,7 @@ import org.elasticsearch.search.fetch.subphase.InnerHitsPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SourceLookup; +import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; @@ -81,11 +82,26 @@ public void execute(SearchContext context) { if (context.docIdsToLoadSize() == 0) { // no individual hits to process, so we shortcut - context.fetchResult().hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), - context.queryResult().getMaxScore())); + SearchHits hits = new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore()); + context.fetchResult().shardResult(hits, null); return; } + Profiler profiler = context.getProfilers() == null ? 
Profiler.NOOP : context.getProfilers().startProfilingFetchPhase(); + SearchHits hits = null; + try { + hits = buildSearchHits(context, profiler); + } finally { + // Always finish profiling + ProfileResult profileResult = profiler.finish(); + // Only set the shardResults if building search hits was successful + if (hits != null) { + context.fetchResult().shardResult(hits, profileResult); + } + } + } + + private SearchHits buildSearchHits(SearchContext context, Profiler profiler) { DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()]; for (int index = 0; index < context.docIdsToLoadSize(); index++) { docs[index] = new DocIdToIndex(context.docIdsToLoad()[index], index); @@ -95,12 +111,13 @@ public void execute(SearchContext context) { Map> storedToRequestedFields = new HashMap<>(); FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields); + profiler.visitor(fieldsVisitor); FetchContext fetchContext = new FetchContext(context); SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; - List processors = getProcessors(context.shardTarget(), fetchContext); + List processors = getProcessors(context.shardTarget(), fetchContext, profiler); NestedDocuments nestedDocuments = context.getSearchExecutionContext().getNestedDocuments(); int currentReaderIndex = -1; @@ -116,27 +133,33 @@ public void execute(SearchContext context) { try { int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); if (currentReaderIndex != readerIndex) { - currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); - currentReaderIndex = readerIndex; - if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader - && hasSequentialDocs && docs.length >= 10) { - // All the docs to fetch are adjacent but Lucene stored fields are optimized - // for random access and don't optimize for sequential access - except for merging. - // So we do a little hack here and pretend we're going to do merges in order to - // get better sequential access. - SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader(); - fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument; - } else { - fieldReader = currentReaderContext.reader()::document; - } - for (FetchSubPhaseProcessor processor : processors) { - processor.setNextReader(currentReaderContext); + profiler.startNextReader(); + try { + currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); + currentReaderIndex = readerIndex; + if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader + && hasSequentialDocs && docs.length >= 10) { + // All the docs to fetch are adjacent but Lucene stored fields are optimized + // for random access and don't optimize for sequential access - except for merging. + // So we do a little hack here and pretend we're going to do merges in order to + // get better sequential access. 
+ SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader(); + fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument; + } else { + fieldReader = currentReaderContext.reader()::document; + } + for (FetchSubPhaseProcessor processor : processors) { + processor.setNextReader(currentReaderContext); + } + leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(currentReaderContext); + } finally { + profiler.stopNextReader(); } - leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(currentReaderContext); } assert currentReaderContext != null; HitContext hit = prepareHitContext( context, + profiler, leafNestedDocuments, fieldsVisitor, docId, @@ -156,17 +179,16 @@ public void execute(SearchContext context) { } TotalHits totalHits = context.queryResult().getTotalHits(); - context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore())); - + return new SearchHits(hits, totalHits, context.queryResult().getMaxScore()); } - List getProcessors(SearchShardTarget target, FetchContext context) { + List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) { try { List processors = new ArrayList<>(); for (FetchSubPhase fsp : fetchSubPhases) { FetchSubPhaseProcessor processor = fsp.getProcessor(context); if (processor != null) { - processors.add(processor); + processors.add(profiler.profile(fsp.getClass().getSimpleName(), "", processor)); } } return processors; @@ -236,6 +258,7 @@ private boolean sourceRequired(SearchContext context) { } private HitContext prepareHitContext(SearchContext context, + Profiler profiler, LeafNestedDocuments nestedDocuments, FieldsVisitor fieldsVisitor, int docId, @@ -244,9 +267,9 @@ private HitContext prepareHitContext(SearchContext context, CheckedBiConsumer storedFieldReader) throws IOException { if (nestedDocuments.advance(docId - subReaderContext.docBase) == null) { return prepareNonNestedHitContext( - context, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, storedFieldReader); + context, profiler, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, storedFieldReader); } else { - return prepareNestedHitContext(context, docId, nestedDocuments, storedToRequestedFields, + return prepareNestedHitContext(context, profiler, docId, nestedDocuments, storedToRequestedFields, subReaderContext, storedFieldReader); } } @@ -259,6 +282,7 @@ private HitContext prepareHitContext(SearchContext context, * allows fetch subphases that use the hit context to access the preloaded source. 
*/ private HitContext prepareNonNestedHitContext(SearchContext context, + Profiler profiler, FieldsVisitor fieldsVisitor, int docId, Map> storedToRequestedFields, @@ -271,8 +295,14 @@ private HitContext prepareNonNestedHitContext(SearchContext context, return new HitContext(hit, subReaderContext, subDocId); } else { SearchHit hit; - loadStoredFields(context.getSearchExecutionContext()::getFieldType, searchExecutionContext.getType(), fieldReader, - fieldsVisitor, subDocId); + loadStoredFields( + context.getSearchExecutionContext()::getFieldType, + profiler, + searchExecutionContext.getType(), + fieldReader, + fieldsVisitor, + subDocId + ); Uid uid = fieldsVisitor.uid(); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); @@ -307,6 +337,7 @@ private HitContext prepareNonNestedHitContext(SearchContext context, */ @SuppressWarnings("unchecked") private HitContext prepareNestedHitContext(SearchContext context, + Profiler profiler, int topDocId, LeafNestedDocuments nestedInfo, Map> storedToRequestedFields, @@ -334,8 +365,14 @@ private HitContext prepareNestedHitContext(SearchContext context, } } else { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(searchExecutionContext::getFieldType, searchExecutionContext.getType(), - storedFieldReader, rootFieldsVisitor, nestedInfo.rootDoc()); + loadStoredFields( + searchExecutionContext::getFieldType, + profiler, + searchExecutionContext.getType(), + storedFieldReader, + rootFieldsVisitor, + nestedInfo.rootDoc() + ); rootFieldsVisitor.postProcess(searchExecutionContext::getFieldType, searchExecutionContext.getType()); rootId = rootFieldsVisitor.uid(); @@ -354,8 +391,14 @@ private HitContext prepareNestedHitContext(SearchContext context, Map metaFields = emptyMap(); if (context.hasStoredFields() && context.storedFieldsContext().fieldNames().isEmpty() == false) { FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); - loadStoredFields(searchExecutionContext::getFieldType, searchExecutionContext.getType(), - storedFieldReader, nestedFieldsVisitor, nestedInfo.doc()); + loadStoredFields( + searchExecutionContext::getFieldType, + profiler, + searchExecutionContext.getType(), + storedFieldReader, + nestedFieldsVisitor, + nestedInfo.doc() + ); if (nestedFieldsVisitor.fields().isEmpty() == false) { docFields = new HashMap<>(); metaFields = new HashMap<>(); @@ -400,12 +443,18 @@ private HitContext prepareNestedHitContext(SearchContext context, } private void loadStoredFields(Function fieldTypeLookup, + Profiler profileListener, @Nullable String type, CheckedBiConsumer fieldReader, FieldsVisitor fieldVisitor, int docId) throws IOException { - fieldVisitor.reset(); - fieldReader.accept(docId, fieldVisitor); - fieldVisitor.postProcess(fieldTypeLookup, type); + try { + profileListener.startLoadingStoredFields(); + fieldVisitor.reset(); + fieldReader.accept(docId, fieldVisitor); + fieldVisitor.postProcess(fieldTypeLookup, type); + } finally { + profileListener.stopLoadingStoredFields(); + } } private static void fillDocAndMetaFields(SearchContext context, FieldsVisitor fieldsVisitor, @@ -439,4 +488,52 @@ private static void fillDocAndMetaFields(SearchContext context, FieldsVisitor fi static boolean hasSequentialDocs(DocIdToIndex[] docs) { return docs.length > 0 && docs[docs.length-1].docId - docs[0].docId == docs.length - 1; } + + interface Profiler { + ProfileResult finish(); + + FetchSubPhaseProcessor profile(String type, String description, 
FetchSubPhaseProcessor processor); + + void visitor(FieldsVisitor fieldsVisitor); + + void startLoadingStoredFields(); + + void stopLoadingStoredFields(); + + void startNextReader(); + + void stopNextReader(); + + Profiler NOOP = new Profiler() { + @Override + public ProfileResult finish() { + return null; + } + + @Override + public void visitor(FieldsVisitor fieldsVisitor) {} + + @Override + public FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor processor) { + return processor; + } + + @Override + public void startLoadingStoredFields() {} + + @Override + public void stopLoadingStoredFields() {} + + @Override + public void startNextReader() {} + + @Override + public void stopNextReader() {} + + @Override + public String toString() { + return "noop"; + } + }; + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java new file mode 100644 index 0000000000000..822dcabb37ac8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchProfiler.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.fetch; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.index.fieldvisitor.FieldsVisitor; +import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; +import org.elasticsearch.search.profile.AbstractProfileBreakdown; +import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.Timer; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static java.util.stream.Collectors.toList; + +public class FetchProfiler implements FetchPhase.Profiler { + private final FetchProfileBreakdown current; + + /** + * Start profiling at the current time. + */ + public FetchProfiler() { + this(System.nanoTime()); + } + + /** + * Build the profiler starting at a fixed time. + */ + public FetchProfiler(long nanoTime) { + current = new FetchProfileBreakdown(nanoTime); + } + + /** + * Finish profiling at the current time. + */ + @Override + public ProfileResult finish() { + return finish(System.nanoTime()); + } + + /** + * Finish profiling at a fixed time. + */ + public ProfileResult finish(long nanoTime) { + return current.result(nanoTime); + } + + @Override + public void visitor(FieldsVisitor fieldsVisitor) { + current.debug.put( + "stored_fields", + fieldsVisitor == null ? 
org.elasticsearch.core.List.of() : fieldsVisitor.getFieldNames().stream().sorted().collect(toList()) + ); + } + + @Override + public FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor delegate) { + FetchSubPhaseProfileBreakdown breakdown = new FetchSubPhaseProfileBreakdown(type, description, delegate); + current.subPhases.add(breakdown); + return new FetchSubPhaseProcessor() { + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + Timer timer = breakdown.getTimer(FetchSubPhaseTiming.NEXT_READER); + timer.start(); + try { + delegate.setNextReader(readerContext); + } finally { + timer.stop(); + } + } + + @Override + public void process(HitContext hitContext) throws IOException { + Timer timer = breakdown.getTimer(FetchSubPhaseTiming.PROCESS); + timer.start(); + try { + delegate.process(hitContext); + } finally { + timer.stop(); + } + } + }; + } + + @Override + public void startLoadingStoredFields() { + current.getTimer(FetchPhaseTiming.LOAD_STORED_FIELDS).start(); + } + + @Override + public void stopLoadingStoredFields() { + current.getTimer(FetchPhaseTiming.LOAD_STORED_FIELDS).stop(); + } + + @Override + public void startNextReader() { + current.getTimer(FetchPhaseTiming.NEXT_READER).start(); + } + + @Override + public void stopNextReader() { + current.getTimer(FetchPhaseTiming.NEXT_READER).stop(); + } + + static class FetchProfileBreakdown extends AbstractProfileBreakdown { + private final long start; + private final Map debug = new HashMap<>(); + private final List subPhases = new ArrayList<>(); + + FetchProfileBreakdown(long start) { + super(FetchPhaseTiming.class); + this.start = start; + } + + @Override + protected Map toDebugMap() { + return org.elasticsearch.core.Map.copyOf(debug); + } + + ProfileResult result(long stop) { + List children = subPhases.stream() + .sorted(Comparator.comparing(b -> b.type)) + .map(FetchSubPhaseProfileBreakdown::result) + .collect(toList()); + return new ProfileResult("fetch", "", toBreakdownMap(), toDebugMap(), stop - start, children); + } + } + + enum FetchPhaseTiming { + NEXT_READER, + LOAD_STORED_FIELDS; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + + static class FetchSubPhaseProfileBreakdown extends AbstractProfileBreakdown { + private final String type; + private final String description; + private final FetchSubPhaseProcessor processor; + + FetchSubPhaseProfileBreakdown(String type, String description, FetchSubPhaseProcessor processor) { + super(FetchSubPhaseTiming.class); + this.type = type; + this.description = description; + this.processor = processor; + } + + @Override + protected Map toDebugMap() { + return processor.getDebugInfo(); + } + + ProfileResult result() { + return new ProfileResult(type, description, toBreakdownMap(), toDebugMap(), toNodeTime(), org.elasticsearch.core.List.of()); + } + } + + enum FetchSubPhaseTiming { + NEXT_READER, + PROCESS; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 5e9f6ac6580d3..a65f8780bfe84 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.fetch; +import org.elasticsearch.Version; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; @@ -15,6 +16,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.query.QuerySearchResult; import java.io.IOException; @@ -25,18 +27,34 @@ public final class FetchSearchResult extends SearchPhaseResult { // client side counter private transient int counter; + private ProfileResult profileResult; + public FetchSearchResult() { } + public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) { + this.contextId = id; + setSearchShardTarget(shardTarget); + } + public FetchSearchResult(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); hits = new SearchHits(in); + if (in.getVersion().onOrAfter(Version.V_7_16_0)) { + profileResult = in.readOptionalWriteable(ProfileResult::new); + } else { + profileResult = null; + } } - public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) { - this.contextId = id; - setSearchShardTarget(shardTarget); + @Override + public void writeTo(StreamOutput out) throws IOException { + contextId.writeTo(out); + hits.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_7_16_0)) { + out.writeOptionalWriteable(profileResult); + } } @Override @@ -49,9 +67,11 @@ public FetchSearchResult fetchResult() { return this; } - public void hits(SearchHits hits) { + public void shardResult(SearchHits hits, ProfileResult profileResult) { assert assertNoSearchTarget(hits); this.hits = hits; + assert this.profileResult == null; + this.profileResult = profileResult; } private boolean assertNoSearchTarget(SearchHits hits) { @@ -74,9 +94,7 @@ public int counterGetAndIncrement() { return counter++; } - @Override - public void writeTo(StreamOutput out) throws IOException { - contextId.writeTo(out); - hits.writeTo(out); + public ProfileResult profileResult() { + return profileResult; } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java index 3482e85875a94..b2a1058e62c35 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSubPhaseProcessor.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; import java.io.IOException; +import java.util.Map; /** * Executes the logic for a {@link FetchSubPhase} against a particular leaf reader and hit @@ -28,4 +29,11 @@ public interface FetchSubPhaseProcessor { */ void process(HitContext hitContext) throws IOException; + /** + * Called when profiling after processing all documents to get any extra + * debug information the phase collected. + */ + default Map getDebugInfo() { + return null; + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java index d4edd8f711801..73f8b9028daf4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainPhase.java @@ -20,7 +20,6 @@ * Explains the scoring calculations for the top hits. 
*/ public final class ExplainPhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.explain() == false) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index ef10bfc4ac6d0..1fe43794ce6e6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -28,7 +28,6 @@ * Specifying {@code "docvalue_fields": ["field1", "field2"]} */ public final class FetchDocValuesPhase implements FetchSubPhase { - private static final String USE_DEFAULT_FORMAT = "use_field_mapping"; private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(FetchDocValuesPhase.class); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java index 4761d4d2b1627..5aa905b00a17f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhase.java @@ -24,7 +24,6 @@ * retrieves the field values from _source and returns them as document fields. */ public final class FetchFieldsPhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java index 0f176a7a2c622..473a4e56c8209 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchScorePhase.java @@ -21,7 +21,6 @@ import java.io.IOException; public class FetchScorePhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { if (context.fetchScores() == false) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index d27e9d6688ac3..ba9c5b0d9ab4a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -23,7 +23,6 @@ import java.util.Map; public final class FetchSourcePhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext(); @@ -34,6 +33,8 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { assert fetchSourceContext.fetchSource(); return new FetchSubPhaseProcessor() { + private int fastPath; + @Override public void setNextReader(LeafReaderContext readerContext) { @@ -50,46 +51,51 @@ public void process(HitContext hitContext) { } hitExecute(fetchSourceContext, hitContext); } - }; - } - @SuppressWarnings("unchecked") - private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitContext) { + @SuppressWarnings("unchecked") + private void hitExecute(FetchSourceContext fetchSourceContext, 
HitContext hitContext) { + final boolean nestedHit = hitContext.hit().getNestedIdentity() != null; + SourceLookup source = hitContext.sourceLookup(); - final boolean nestedHit = hitContext.hit().getNestedIdentity() != null; - SourceLookup source = hitContext.sourceLookup(); + // If this is a parent document and there are no source filters, then add the source as-is. + if (nestedHit == false && containsFilters(fetchSourceContext) == false) { + hitContext.hit().sourceRef(source.internalSourceRef()); + fastPath++; + return; + } - // If this is a parent document and there are no source filters, then add the source as-is. - if (nestedHit == false && containsFilters(fetchSourceContext) == false) { - hitContext.hit().sourceRef(source.internalSourceRef()); - return; - } + // Otherwise, filter the source and add it to the hit. + Object value = source.filter(fetchSourceContext); + if (nestedHit) { + value = getNestedSource((Map) value, hitContext); + } - // Otherwise, filter the source and add it to the hit. - Object value = source.filter(fetchSourceContext); - if (nestedHit) { - value = getNestedSource((Map) value, hitContext); - } + try { + final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length()); + BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); + XContentBuilder builder = new XContentBuilder(source.sourceContentType().xContent(), streamOutput); + if (value != null) { + builder.value(value); + } else { + // This happens if the source filtering could not find the specified path in the _source. + // Just doing `builder.value(null)` is valid, but the xcontent validation can't detect what format + // it is. In certain cases, for example response serialization, we fail if the xcontent type can't be + // detected. So instead we just return an empty top level object. Also this is in line with what was + // returned in this situation in 5.x and earlier. + builder.startObject(); + builder.endObject(); + } + hitContext.hit().sourceRef(BytesReference.bytes(builder)); + } catch (IOException e) { + throw new ElasticsearchException("Error filtering source", e); + } + } - try { - final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length()); - BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); - XContentBuilder builder = new XContentBuilder(source.sourceContentType().xContent(), streamOutput); - if (value != null) { - builder.value(value); - } else { - // This happens if the source filtering could not find the specified in the _source. - // Just doing `builder.value(null)` is valid, but the xcontent validation can't detect what format - // it is. In certain cases, for example response serialization we fail if no xcontent type can't be - // detected. So instead we just return an empty top level object. Also this is in inline with what was - // being return in this situation in 5.x and earlier.
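The fastPath counter incremented above feeds the new getDebugInfo() override shown just below. A hypothetical processor sketch of the same pattern — count something cheap during process and hand it to the profiler on request (names here are illustrative, not part of this change):

import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;

class CountingProcessorSketch implements FetchSubPhaseProcessor {
    private int fastPath; // how often the cheap path was taken

    @Override
    public void setNextReader(LeafReaderContext readerContext) {
        // no per-segment state needed in this sketch
    }

    @Override
    public void process(FetchSubPhase.HitContext hitContext) {
        fastPath++; // stand-in for "skipped the expensive work for this hit"
    }

    @Override
    public Map<String, Object> getDebugInfo() {
        // Only consulted when profiling is enabled; surfaces under "debug"
        // in the fetch profile, e.g. {"fast_path": 5}.
        return org.elasticsearch.core.Map.of("fast_path", fastPath);
    }
}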
- builder.startObject(); - builder.endObject(); + @Override + public Map getDebugInfo() { + return org.elasticsearch.core.Map.of("fast_path", fastPath); } - hitContext.hit().sourceRef(BytesReference.bytes(builder)); - } catch (IOException e) { - throw new ElasticsearchException("Error filtering source", e); - } + }; } private static boolean containsFilters(FetchSourceContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java index fc16e054e9f88..8b89aecc4761e 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchVersionPhase.java @@ -18,7 +18,6 @@ import java.io.IOException; public final class FetchVersionPhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.version() == false) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index 0383ec1b605cf..85e2d97ac8f7d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -36,7 +36,7 @@ public InnerHitsPhase(FetchPhase fetchPhase) { @Override public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) { - if (searchContext.innerHits() == null) { + if (searchContext.innerHits() == null || searchContext.innerHits().getInnerHits().isEmpty()) { return null; } Map innerHits = searchContext.innerHits().getInnerHits(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java index 3f704e6c6dca2..06e74f9fab60f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -25,7 +25,6 @@ import java.util.Map; public final class MatchedQueriesPhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException { Map namedQueries = new HashMap<>(); @@ -72,5 +71,4 @@ public void process(HitContext hitContext) { } }; } - } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java index 467e7e79acf7d..de0597ba696f0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java @@ -22,10 +22,9 @@ import java.util.List; public final class ScriptFieldsPhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { - if (context.scriptFields() == null) { + if (context.scriptFields() == null || context.scriptFields().fields().isEmpty()) { return null; } List scriptFields = context.scriptFields().fields(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java index 65c97f5887c3f..29909124c64f0 100644 --- 
a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermPhase.java @@ -18,7 +18,6 @@ import java.io.IOException; public final class SeqNoPrimaryTermPhase implements FetchSubPhase { - @Override public FetchSubPhaseProcessor getProcessor(FetchContext context) { if (context.seqNoAndPrimaryTerm() == false) { diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java index dd3b6baf88d7a..4fa628ca3c8d7 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java @@ -40,13 +40,13 @@ public InternalSearchResponse(SearchHits hits, InternalAggregations aggregations public InternalSearchResponse(StreamInput in) throws IOException { super( - new SearchHits(in), - in.readBoolean() ? InternalAggregations.readFrom(in) : null, - in.readBoolean() ? new Suggest(in) : null, - in.readBoolean(), - in.readOptionalBoolean(), - in.readOptionalWriteable(SearchProfileResults::new), - in.readVInt() + new SearchHits(in), + in.readBoolean() ? InternalAggregations.readFrom(in) : null, + in.readBoolean() ? new Suggest(in) : null, + in.readBoolean(), + in.readOptionalBoolean(), + in.readOptionalWriteable(SearchProfileResults::new), + in.readVInt() ); } diff --git a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java index d2c4972219ce3..b06bbe5bc3572 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -9,15 +9,16 @@ package org.elasticsearch.search.profile; import org.elasticsearch.Version; -import org.elasticsearch.common.xcontent.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.xcontent.InstantiatingObjectParser; +import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.core.TimeValue; import java.io.IOException; import java.util.Collections; @@ -26,17 +27,13 @@ import java.util.Objects; import java.util.concurrent.TimeUnit; +import static java.util.stream.Collectors.toMap; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** - * This class is the internal representation of a profiled Query, corresponding - * to a single node in the query tree. It is built after the query has finished executing - * and is merely a structured representation, rather than the entity that collects the timing - * profile (see InternalProfiler for that) - *

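The replacement javadoc below reflects that ProfileResult is now a general-purpose tree node shared by query, aggregation, and (with this change) fetch profiling. Building one by hand, with made-up numbers, looks roughly like this (illustrative only; constructor shape taken from FetchProfiler above):

ProfileResult subPhase = new ProfileResult(
    "FetchSourcePhase", "",                                               // type, description
    org.elasticsearch.core.Map.of("process", 1000L, "process_count", 5L), // breakdown
    org.elasticsearch.core.Map.of("fast_path", 5),                        // debug
    1000L,                                                                // node time in nanos
    org.elasticsearch.core.List.of());                                    // children
ProfileResult fetch = new ProfileResult(
    "fetch", "",
    org.elasticsearch.core.Map.of("load_stored_fields", 5000L),
    org.elasticsearch.core.Map.of(),
    6000L,
    org.elasticsearch.core.List.of(subPhase));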
- * Each InternalProfileResult has a List of InternalProfileResults, which will contain - * "children" queries if applicable + * The result of a profiled *thing*, like a query or an aggregation. See + * {@link AbstractProfiler} for the statistic collection framework. */ public final class ProfileResult implements Writeable, ToXContentObject { static final ParseField TYPE = new ParseField("type"); @@ -161,13 +158,41 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder.endObject(); } + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ProfileResult other = (ProfileResult) obj; + return type.equals(other.type) + && description.equals(other.description) + && breakdown.equals(other.breakdown) + && debug.equals(other.debug) + && nodeTime == other.nodeTime + && children.equals(other.children); + } + + @Override + public int hashCode() { + return Objects.hash(type, description, breakdown, debug, nodeTime, children); + } + + @Override + public String toString() { + return Strings.toString(this); + } + private static final InstantiatingObjectParser PARSER; static { InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder("profile_result", true, ProfileResult.class); parser.declareString(constructorArg(), TYPE); parser.declareString(constructorArg(), DESCRIPTION); - parser.declareObject(constructorArg(), (p, c) -> p.map(), BREAKDOWN); + parser.declareObject( + constructorArg(), + (p, c) -> p.map().entrySet().stream().collect(toMap(Map.Entry::getKey, e -> ((Number) e.getValue()).longValue())), + BREAKDOWN + ); parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG); parser.declareLong(constructorArg(), NODE_TIME_RAW); parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN); diff --git a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java index 45066c73f0e39..b7ed401809a7d 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java +++ b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java @@ -8,8 +8,11 @@ package org.elasticsearch.search.profile; +import org.elasticsearch.search.fetch.FetchProfiler; import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; import org.elasticsearch.search.profile.aggregation.AggregationProfiler; +import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.profile.query.QueryProfiler; import java.util.ArrayList; @@ -20,18 +23,17 @@ public final class Profilers { private final ContextIndexSearcher searcher; - private final List queryProfilers; - private final AggregationProfiler aggProfiler; + private final List queryProfilers = new ArrayList<>(); + private final AggregationProfiler aggProfiler = new AggregationProfiler(); - /** Sole constructor. This {@link Profilers} instance will initially wrap one {@link QueryProfiler}. */ public Profilers(ContextIndexSearcher searcher) { this.searcher = searcher; - this.queryProfilers = new ArrayList<>(); - this.aggProfiler = new AggregationProfiler(); addQueryProfiler(); } - /** Switch to a new profile. */ + /** + * Begin profiling a new query. 
+ */ public QueryProfiler addQueryProfiler() { QueryProfiler profiler = new QueryProfiler(); searcher.setProfiler(profiler); @@ -39,19 +41,45 @@ public QueryProfiler addQueryProfiler() { return profiler; } - /** Get the current profiler. */ + /** + * Get the profiler for the query we are currently processing. + */ public QueryProfiler getCurrentQueryProfiler() { return queryProfilers.get(queryProfilers.size() - 1); } - /** Return the list of all created {@link QueryProfiler}s so far. */ + /** + * The list of all {@link QueryProfiler}s created so far. + */ public List getQueryProfilers() { return Collections.unmodifiableList(queryProfilers); } - /** Return the {@link AggregationProfiler}. */ public AggregationProfiler getAggregationProfiler() { return aggProfiler; } + /** + * Build a profiler for the fetch phase. + */ + public FetchProfiler startProfilingFetchPhase() { + return new FetchProfiler(); + } + + /** + * Build the results for the query phase. + */ + public SearchProfileQueryPhaseResult buildQueryPhaseResults() { + List queryResults = new ArrayList<>(queryProfilers.size()); + for (QueryProfiler queryProfiler : queryProfilers) { + QueryProfileShardResult result = new QueryProfileShardResult( + queryProfiler.getTree(), + queryProfiler.getRewriteTime(), + queryProfiler.getCollector() + ); + queryResults.add(result); + } + AggregationProfileShardResult aggResults = new AggregationProfileShardResult(aggProfiler.getTree()); + return new SearchProfileQueryPhaseResult(queryResults, aggResults); + } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java index 0192c7a2f916f..38c5cb250f9d0 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResult.java @@ -18,6 +18,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; /** * Profile results from a shard for the search phase. @@ -63,4 +64,18 @@ public List getQueryProfileResults() { public AggregationProfileShardResult getAggregationProfileResults() { return aggProfileShardResult; } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SearchProfileQueryPhaseResult other = (SearchProfileQueryPhaseResult) obj; + return queryProfileResults.equals(other.queryProfileResults) && aggProfileShardResult.equals(other.aggProfileShardResult); + } + + @Override + public int hashCode() { + return Objects.hash(queryProfileResults, aggProfileShardResult); + } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java index d56ac447deba8..4e7e467cf51dc 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java @@ -1,5 +1,3 @@ -package org.elasticsearch.search.profile; - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License @@ -8,6 +6,11 @@ * Side Public License, v 1. 
*/ +package org.elasticsearch.search.profile; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; + import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -15,9 +18,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; -import org.elasticsearch.search.profile.aggregation.AggregationProfiler; import org.elasticsearch.search.profile.query.QueryProfileShardResult; -import org.elasticsearch.search.profile.query.QueryProfiler; import java.io.IOException; import java.util.ArrayList; @@ -34,42 +35,42 @@ */ public final class SearchProfileResults implements Writeable, ToXContentFragment { - private static final String SEARCHES_FIELD = "searches"; private static final String ID_FIELD = "id"; private static final String SHARDS_FIELD = "shards"; public static final String PROFILE_FIELD = "profile"; - private Map<String, SearchProfileQueryPhaseResult> shardResults; + private Map<String, SearchProfileShardResult> shardResults; - public SearchProfileResults(Map<String, SearchProfileQueryPhaseResult> shardResults) { + public SearchProfileResults(Map<String, SearchProfileShardResult> shardResults) { this.shardResults = Collections.unmodifiableMap(shardResults); } public SearchProfileResults(StreamInput in) throws IOException { - int size = in.readInt(); - shardResults = new HashMap<>(size); - - for (int i = 0; i < size; i++) { - String key = in.readString(); - SearchProfileQueryPhaseResult shardResult = new SearchProfileQueryPhaseResult(in); - shardResults.put(key, shardResult); + if (in.getVersion().onOrAfter(Version.V_7_16_0)) { + shardResults = in.readMap(StreamInput::readString, SearchProfileShardResult::new); + } else { + // Before 7.16.0 we only sent the query phase result + shardResults = in.readMap( + StreamInput::readString, + i -> new SearchProfileShardResult(new SearchProfileQueryPhaseResult(i), null) + ); } - shardResults = Collections.unmodifiableMap(shardResults); - } - - public Map<String, SearchProfileQueryPhaseResult> getShardResults() { - return this.shardResults; } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeInt(shardResults.size()); - for (Map.Entry<String, SearchProfileQueryPhaseResult> entry : shardResults.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); + if (out.getVersion().onOrAfter(Version.V_7_16_0)) { + out.writeMap(shardResults, StreamOutput::writeString, (o, r) -> r.writeTo(o)); + } else { + // Before 7.16.0 we only sent the query phase + out.writeMap(shardResults, StreamOutput::writeString, (o, r) -> r.getQueryPhase().writeTo(o)); } } + public Map<String, SearchProfileShardResult> getShardResults() { + return shardResults; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(PROFILE_FIELD).startArray(SHARDS_FIELD); @@ -79,28 +80,41 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (String key : sortedKeys) { builder.startObject(); builder.field(ID_FIELD, key); - builder.startArray(SEARCHES_FIELD); - SearchProfileQueryPhaseResult profileShardResult = shardResults.get(key); - for (QueryProfileShardResult result : profileShardResult.getQueryProfileResults()) { - result.toXContent(builder, params); - } - builder.endArray(); - profileShardResult.getAggregationProfileResults().toXContent(builder, params); + shardResults.get(key).toXContent(builder, params); builder.endObject(); } builder.endArray().endObject(); return builder; } + @Override + public boolean equals(Object
obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SearchProfileResults other = (SearchProfileResults) obj; + return shardResults.equals(other.shardResults); + } + + @Override + public int hashCode() { + return shardResults.hashCode(); + } + + @Override + public String toString() { + return Strings.toString(this); + } + public static SearchProfileResults fromXContent(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - Map searchProfileResults = new HashMap<>(); + Map profileResults = new HashMap<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.START_ARRAY) { if (SHARDS_FIELD.equals(parser.currentName())) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - parseSearchProfileResultsEntry(parser, searchProfileResults); + parseProfileResultsEntry(parser, profileResults); } } else { parser.skipChildren(); @@ -109,15 +123,16 @@ public static SearchProfileResults fromXContent(XContentParser parser) throws IO parser.skipChildren(); } } - return new SearchProfileResults(searchProfileResults); + return new SearchProfileResults(profileResults); } - private static void parseSearchProfileResultsEntry(XContentParser parser, - Map searchProfileResults) throws IOException { + private static void parseProfileResultsEntry(XContentParser parser, + Map searchProfileResults) throws IOException { XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); List queryProfileResults = new ArrayList<>(); AggregationProfileShardResult aggProfileShardResult = null; + ProfileResult fetchResult = null; String id = null; String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -130,7 +145,7 @@ private static void parseSearchProfileResultsEntry(XContentParser parser, parser.skipChildren(); } } else if (token == XContentParser.Token.START_ARRAY) { - if (SEARCHES_FIELD.equals(currentFieldName)) { + if ("searches".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { queryProfileResults.add(QueryProfileShardResult.fromXContent(parser)); } @@ -139,32 +154,16 @@ private static void parseSearchProfileResultsEntry(XContentParser parser, } else { parser.skipChildren(); } + } else if (token == XContentParser.Token.START_OBJECT) { + fetchResult = ProfileResult.fromXContent(parser); } else { parser.skipChildren(); } } - searchProfileResults.put(id, new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult)); - } - - /** - * Helper method to convert Profiler into InternalProfileShardResults, which - * can be serialized to other nodes, emitted as JSON, etc. 
- * - * @param profilers - * The {@link Profilers} to convert into results - * @return A {@link SearchProfileQueryPhaseResult} representing the results for this - * shard - */ - public static SearchProfileQueryPhaseResult buildShardResults(Profilers profilers) { - List queryProfilers = profilers.getQueryProfilers(); - AggregationProfiler aggProfiler = profilers.getAggregationProfiler(); - List queryResults = new ArrayList<>(queryProfilers.size()); - for (QueryProfiler queryProfiler : queryProfilers) { - QueryProfileShardResult result = new QueryProfileShardResult(queryProfiler.getTree(), queryProfiler.getRewriteTime(), - queryProfiler.getCollector()); - queryResults.add(result); - } - AggregationProfileShardResult aggResults = new AggregationProfileShardResult(aggProfiler.getTree()); - return new SearchProfileQueryPhaseResult(queryResults, aggResults); + SearchProfileShardResult result = new SearchProfileShardResult( + new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult), + fetchResult + ); + searchProfileResults.put(id, result); } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResultsBuilder.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResultsBuilder.java new file mode 100644 index 0000000000000..9884664d58738 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResultsBuilder.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.fetch.FetchSearchResult; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * Profile results for the query phase run on all shards. + */ +public class SearchProfileResultsBuilder { + private final Map queryPhaseResults; + + public SearchProfileResultsBuilder(Map queryPhaseResults) { + this.queryPhaseResults = Collections.unmodifiableMap(queryPhaseResults); + } + + /** + * Merge the profiling information from some fetch results into this + * profiling information. + */ + public SearchProfileResults build(Collection fetchResults) { + Map mergedShardResults = new HashMap<>(queryPhaseResults.size()); + for (SearchPhaseResult r : fetchResults) { + FetchSearchResult fr = r.fetchResult(); + String key = fr.getSearchShardTarget().toString(); + SearchProfileQueryPhaseResult queryPhase = queryPhaseResults.get(key); + if (queryPhase == null) { + throw new IllegalStateException( + "Profile returned fetch phase information for [" + + key + + "] but didn't return query phase information. 
Query phase keys were " + + queryPhaseResults.keySet() + ); + } + mergedShardResults.put(key, new SearchProfileShardResult(queryPhase, fr.profileResult())); + } + for (Map.Entry e : queryPhaseResults.entrySet()) { + if (false == mergedShardResults.containsKey(e.getKey())) { + mergedShardResults.put(e.getKey(), new SearchProfileShardResult(e.getValue(), null)); + } + } + return new SearchProfileResults(mergedShardResults); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + SearchProfileResultsBuilder other = (SearchProfileResultsBuilder) obj; + return queryPhaseResults.equals(other.queryPhaseResults); + } + + @Override + public int hashCode() { + return queryPhaseResults.hashCode(); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java new file mode 100644 index 0000000000000..3b67f0e27808b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResult.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.profile; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; +import org.elasticsearch.search.profile.query.QueryProfileShardResult; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * Profile results from a particular shard for all search phases. 
+ */ +public class SearchProfileShardResult implements Writeable, ToXContentFragment { + private final SearchProfileQueryPhaseResult queryPhase; + + private final ProfileResult fetchPhase; + + public SearchProfileShardResult(SearchProfileQueryPhaseResult queryPhase, @Nullable ProfileResult fetch) { + this.queryPhase = queryPhase; + this.fetchPhase = fetch; + } + + public SearchProfileShardResult(StreamInput in) throws IOException { + queryPhase = new SearchProfileQueryPhaseResult(in); + fetchPhase = in.readOptionalWriteable(ProfileResult::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + queryPhase.writeTo(out); + out.writeOptionalWriteable(fetchPhase); + } + + public SearchProfileQueryPhaseResult getQueryPhase() { + return queryPhase; + } + + public ProfileResult getFetchPhase() { + return fetchPhase; + } + + public List<QueryProfileShardResult> getQueryProfileResults() { + return queryPhase.getQueryProfileResults(); + } + + public AggregationProfileShardResult getAggregationProfileResults() { + return queryPhase.getAggregationProfileResults(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("searches"); + for (QueryProfileShardResult result : queryPhase.getQueryProfileResults()) { + result.toXContent(builder, params); + } + builder.endArray(); + queryPhase.getAggregationProfileResults().toXContent(builder, params); + if (fetchPhase != null) { + builder.field("fetch"); + fetchPhase.toXContent(builder, params); + } + return builder; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SearchProfileShardResult other = (SearchProfileShardResult) obj; + return queryPhase.equals(other.queryPhase) && Objects.equals(fetchPhase, other.fetchPhase); + } + + @Override + public int hashCode() { + return Objects.hash(queryPhase, fetchPhase); + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java index 8184c30c34725..aae41f023a8db 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResult.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import org.elasticsearch.common.Strings; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; @@ -55,7 +56,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - public List<ProfileResult> getProfileResults() { return Collections.unmodifiableList(aggProfileResults); } @@ -70,6 +70,25 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + AggregationProfileShardResult other = (AggregationProfileShardResult) obj; + return aggProfileResults.equals(other.aggProfileResults); + } + + @Override + public int hashCode() { + return aggProfileResults.hashCode(); + } + + @Override + public String toString() { + return Strings.toString(this); + } + public static AggregationProfileShardResult fromXContent(XContentParser parser) throws IOException {
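// fromXContent below round-trips what toXContent wrote above: it expects to
// be positioned on a START_ARRAY and reads one ProfileResult per profiled
// aggregation.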
XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java index a0c2c5d7fc704..cbcee72029205 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.profile.query; import org.elasticsearch.common.xcontent.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -21,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; @@ -60,14 +62,14 @@ public class CollectorResult implements ToXContentObject, Writeable { /** * The total elapsed time for this Collector */ - private final Long time; + private final long time; /** * A list of children collectors "embedded" inside this collector */ private List children; - public CollectorResult(String collectorName, String reason, Long time, List children) { + public CollectorResult(String collectorName, String reason, long time, List children) { this.collectorName = collectorName; this.reason = reason; this.time = time; @@ -128,6 +130,28 @@ public List getProfiledChildren() { return children; } + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + CollectorResult other = (CollectorResult) obj; + return collectorName.equals(other.collectorName) + && reason.equals(other.reason) + && time == other.time + && children.equals(other.children); + } + + @Override + public int hashCode() { + return Objects.hash(collectorName, reason, time, children); + } + + @Override + public String toString() { + return Strings.toString(this); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder = builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index 9f2cb39886531..352f48ee6589e 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.profile.query; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -20,6 +21,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; @@ -100,6 +102,27 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryProfileShardResult other 
= (QueryProfileShardResult) obj; + return queryProfileResults.equals(other.queryProfileResults) + && profileCollector.equals(other.profileCollector) + && rewriteTime == other.rewriteTime; + } + + @Override + public int hashCode() { + return Objects.hash(queryProfileResults, profileCollector, rewriteTime); + } + + @Override + public String toString() { + return Strings.toString(this); + } + public static QueryProfileShardResult fromXContent(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 3e884b65deb0b..f67083efb6593 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -42,8 +42,6 @@ import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; -import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.query.InternalProfileCollector; import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.sort.SortAndFormats; @@ -127,9 +125,7 @@ public void execute(SearchContext searchContext) throws QueryPhaseExecutionExcep aggregationPhase.execute(searchContext); if (searchContext.getProfilers() != null) { - SearchProfileQueryPhaseResult shardResults = SearchProfileResults - .buildShardResults(searchContext.getProfilers()); - searchContext.queryResult().profileResults(shardResults); + searchContext.queryResult().profileResults(searchContext.getProfilers().buildQueryPhaseResults()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index c4edbd240fad7..82090770bd30a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -26,6 +26,9 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; +import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; @@ -34,7 +37,11 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + public class FetchSearchPhaseTests extends ESTestCase { + private static final long FETCH_PROFILE_TIME = 555; public void testShortcutQueryAndFetchOptimization() { SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder()); @@ -43,6 +50,7 @@ public void testShortcutQueryAndFetchOptimization() { new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP, 
diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
index c4edbd240fad7..82090770bd30a 100644
--- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java
@@ -26,6 +26,9 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult;
 import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
 import org.elasticsearch.search.internal.ShardSearchContextId;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalAggregationTestCase;
@@ -34,7 +37,11 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
 public class FetchSearchPhaseTests extends ESTestCase {
+    private static final long FETCH_PROFILE_TIME = 555;
 
     public void testShortcutQueryAndFetchOptimization() {
         SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());
@@ -43,6 +50,7 @@ public void testShortcutQueryAndFetchOptimization() {
             new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
             mockSearchPhaseContext.getRequest(), 1, exc -> {});
         boolean hasHits = randomBoolean();
+        boolean profiled = hasHits && randomBoolean();
         final int numHits;
         if (hasHits) {
             QuerySearchResult queryResult = new QuerySearchResult();
@@ -50,9 +58,12 @@ public void testShortcutQueryAndFetchOptimization() {
                 new ShardId("index", "index", 0), null, OriginalIndices.NONE));
             queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                 new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]);
+            addProfiling(profiled, queryResult);
             queryResult.size(1);
             FetchSearchResult fetchResult = new FetchSearchResult();
-            fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
+            fetchResult.setSearchShardTarget(queryResult.getSearchShardTarget());
+            SearchHits hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
+            fetchResult.shardResult(hits, fetchProfile(profiled));
             QueryFetchSearchResult fetchSearchResult = new QueryFetchSearchResult(queryResult, fetchResult);
             fetchSearchResult.setShardIndex(0);
             results.consumeResult(fetchSearchResult, () -> {});
@@ -77,9 +88,21 @@ public void run() {
         if (numHits != 0) {
             assertEquals(42, searchResponse.getHits().getAt(0).docId());
         }
+        assertProfiles(profiled, 1, searchResponse);
         assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
     }
 
+    private void assertProfiles(boolean profiled, int totalShards, SearchResponse searchResponse) {
+        if (false == profiled) {
+            assertThat(searchResponse.getProfileResults(), equalTo(org.elasticsearch.core.Map.of()));
+            return;
+        }
+        assertThat(searchResponse.getProfileResults().values().size(), equalTo(totalShards));
+        for (SearchProfileShardResult profileShardResult : searchResponse.getProfileResults().values()) {
+            assertThat(profileShardResult.getFetchPhase().getTime(), equalTo(FETCH_PROFILE_TIME));
+        }
+    }
+
     public void testFetchTwoDocument() {
         MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
         SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());
@@ -87,22 +110,26 @@ public void testFetchTwoDocument() {
             new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
             mockSearchPhaseContext.getRequest(), 2, exc -> {});
         int resultSetSize = randomIntBetween(2, 10);
+        boolean profiled = randomBoolean();
+
         ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123);
-        QuerySearchResult queryResult = new QuerySearchResult(ctx1, new SearchShardTarget("node1", new ShardId("test", "na", 0),
-            null, OriginalIndices.NONE), null);
+        SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
+        QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize); // the size of the result set
         queryResult.setShardIndex(0);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
         final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321);
-        queryResult = new QuerySearchResult(
-            ctx2, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
+        queryResult = new QuerySearchResult(ctx2, shard2Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize);
         queryResult.setShardIndex(1);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
         mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
@@ -110,14 +137,16 @@ public void testFetchTwoDocument() {
             public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
                                          SearchActionListener<FetchSearchResult> listener) {
                 FetchSearchResult fetchResult = new FetchSearchResult();
+                SearchHits hits;
                 if (request.contextId().equals(ctx2)) {
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
+                    fetchResult.setSearchShardTarget(shard2Target);
+                    hits = new SearchHits(new SearchHit[] { new SearchHit(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F);
                 } else {
                     assertEquals(ctx1, request.contextId());
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
+                    fetchResult.setSearchShardTarget(shard1Target);
+                    hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
                 }
+                fetchResult.shardResult(hits, fetchProfile(profiled));
                 listener.onResponse(fetchResult);
             }
         };
@@ -138,6 +167,7 @@ public void run() {
         assertEquals(42, searchResponse.getHits().getAt(1).docId());
         assertEquals(0, searchResponse.getFailedShards());
         assertEquals(2, searchResponse.getSuccessfulShards());
+        assertProfiles(profiled, 2, searchResponse);
         assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
     }
 
@@ -148,21 +178,25 @@ public void testFailFetchOneDoc() {
             new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
             mockSearchPhaseContext.getRequest(), 2, exc -> {});
         int resultSetSize = randomIntBetween(2, 10);
+        boolean profiled = randomBoolean();
+
         final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123);
-        QuerySearchResult queryResult = new QuerySearchResult(ctx,
-            new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
+        QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize); // the size of the result set
         queryResult.setShardIndex(0);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
-        queryResult = new QuerySearchResult(new ShardSearchContextId("", 321),
-            new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
+        queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize);
         queryResult.setShardIndex(1);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
         mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
@@ -171,13 +205,17 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe
                                          SearchActionListener<FetchSearchResult> listener) {
                 if (request.contextId().getId() == 321) {
                     FetchSearchResult fetchResult = new FetchSearchResult();
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
+                    fetchResult.setSearchShardTarget(shard2Target);
+                    SearchHits hits = new SearchHits(
+                        new SearchHit[] { new SearchHit(84) },
+                        new TotalHits(1, TotalHits.Relation.EQUAL_TO),
+                        2.0F
+                    );
+                    fetchResult.shardResult(hits, fetchProfile(profiled));
                     listener.onResponse(fetchResult);
                 } else {
                     listener.onFailure(new MockDirectoryWrapper.FakeIOException());
                 }
-
             }
         };
         FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext,
@@ -199,6 +237,21 @@ public void run() {
         assertEquals(1, searchResponse.getShardFailures().length);
         assertTrue(searchResponse.getShardFailures()[0].getCause() instanceof MockDirectoryWrapper.FakeIOException);
         assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
+        if (profiled) {
+            /*
+             * Shard 1 (context 123) failed to fetch but still ran its query,
+             * so there is query profiling information for both shards but
+             * fetch profiling information only for the successful shard.
+             */
+            assertThat(searchResponse.getProfileResults().values().size(), equalTo(2));
+            assertThat(searchResponse.getProfileResults().get(shard1Target.toString()).getFetchPhase(), nullValue());
+            assertThat(
+                searchResponse.getProfileResults().get(shard2Target.toString()).getFetchPhase().getTime(),
+                equalTo(FETCH_PROFILE_TIME)
+            );
+        } else {
+            assertThat(searchResponse.getProfileResults(), equalTo(org.elasticsearch.core.Map.of()));
+        }
         assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx));
     }
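Review note: the `if (profiled)` block above pins down an important asymmetry: a shard can report a query-phase profile with no fetch-phase profile when its fetch fails or never runs. Consumers of `getProfileResults()` should therefore treat the fetch section as optional. A small hedged sketch of defensive reading, using only the accessors exercised by these tests (`searchResponse` is assumed in scope):

```java
// Sketch: fetch-phase profiles are optional per shard, query-phase ones are not.
for (Map.Entry<String, SearchProfileShardResult> entry : searchResponse.getProfileResults().entrySet()) {
    ProfileResult fetch = entry.getValue().getFetchPhase();
    if (fetch == null) {
        continue; // shard searched but never fetched, or its fetch failed
    }
    System.out.println(entry.getKey() + " fetch took " + fetch.getTime() + "ns");
}
```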
@@ -206,18 +259,22 @@ public void testFetchDocsConcurrently() throws InterruptedException {
         int resultSetSize = randomIntBetween(0, 100);
         // we use at least 2 hits otherwise this is subject to single shard optimization and we trip an assert...
         int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard
+        boolean profiled = randomBoolean();
+
         SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());
         MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits);
         QueryPhaseResultConsumer results = controller.newSearchPhaseResults(EsExecutors.DIRECT_EXECUTOR_SERVICE,
             new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
             mockSearchPhaseContext.getRequest(), numHits, exc -> {});
+        SearchShardTarget[] shardTargets = new SearchShardTarget[numHits];
         for (int i = 0; i < numHits; i++) {
-            QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i),
-                new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null);
+            shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null, OriginalIndices.NONE);
+            QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i), shardTargets[i], null);
             queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                 new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]);
             queryResult.size(resultSetSize); // the size of the result set
             queryResult.setShardIndex(i);
+            addProfiling(profiled, queryResult);
             results.consumeResult(queryResult, () -> {});
         }
         mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
@@ -226,8 +283,13 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe
                                          SearchActionListener<FetchSearchResult> listener) {
                 new Thread(() -> {
                     FetchSearchResult fetchResult = new FetchSearchResult();
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit((int) (request.contextId().getId()+1))},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 100F));
+                    fetchResult.setSearchShardTarget(shardTargets[(int) request.contextId().getId()]);
+                    SearchHits hits = new SearchHits(
+                        new SearchHit[] { new SearchHit((int) (request.contextId().getId() + 1)) },
+                        new TotalHits(1, TotalHits.Relation.EQUAL_TO),
+                        100F
+                    );
+                    fetchResult.shardResult(hits, fetchProfile(profiled));
                     listener.onResponse(fetchResult);
                 }).start();
             }
@@ -257,6 +319,19 @@ public void run() {
         }
         assertEquals(0, searchResponse.getFailedShards());
         assertEquals(numHits, searchResponse.getSuccessfulShards());
+        if (profiled) {
+            assertThat(searchResponse.getProfileResults().values().size(), equalTo(numHits));
+            int count = 0;
+            for (SearchProfileShardResult profileShardResult : searchResponse.getProfileResults().values()) {
+                if (profileShardResult.getFetchPhase() != null) {
+                    count++;
+                    assertThat(profileShardResult.getFetchPhase().getTime(), equalTo(FETCH_PROFILE_TIME));
+                }
+            }
+            assertThat(count, equalTo(Math.min(numHits, resultSetSize)));
+        } else {
+            assertThat(searchResponse.getProfileResults(), equalTo(org.elasticsearch.core.Map.of()));
+        }
         int sizeReleasedContexts = Math.max(0, numHits - resultSetSize); // all non fetched results will be freed
         assertEquals(mockSearchPhaseContext.releasedSearchContexts.toString(),
             sizeReleasedContexts, mockSearchPhaseContext.releasedSearchContexts.size());
@@ -270,22 +345,26 @@ public void testExceptionFailsPhase() {
             new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
             mockSearchPhaseContext.getRequest(), 2, exc -> {});
         int resultSetSize = randomIntBetween(2, 10);
-        QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123),
-            new SearchShardTarget("node1", new ShardId("test", "na", 0),
-            null, OriginalIndices.NONE), null);
+        boolean profiled = randomBoolean();
+
+        SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
+        QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), shard1Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize); // the size of the result set
         queryResult.setShardIndex(0);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
-        queryResult = new QuerySearchResult(new ShardSearchContextId("", 321),
-            new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
+        queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize);
         queryResult.setShardIndex(1);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
+
         AtomicInteger numFetches = new AtomicInteger(0);
         mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
             @Override
@@ -295,14 +374,16 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe
                 if (numFetches.incrementAndGet() == 1) {
                     throw new RuntimeException("BOOM");
                 }
+                SearchHits hits;
                 if (request.contextId().getId() == 321) {
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
+                    fetchResult.setSearchShardTarget(shard2Target);
+                    hits = new SearchHits(new SearchHit[] { new SearchHit(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F);
                 } else {
+                    fetchResult.setSearchShardTarget(shard1Target);
                     assertEquals(request, 123);
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
+                    hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
                 }
+                fetchResult.shardResult(hits, fetchProfile(profiled));
                 listener.onResponse(fetchResult);
             }
         };
@@ -328,22 +409,26 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s
             new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
             mockSearchPhaseContext.getRequest(), 2, exc -> {});
         int resultSetSize = 1;
+        boolean profiled = randomBoolean();
+
         final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123);
-        QuerySearchResult queryResult = new QuerySearchResult(ctx1,
-            new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
+        QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize); // the size of the result set
         queryResult.setShardIndex(0);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
         final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321);
-        queryResult = new QuerySearchResult(ctx2,
-            new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
+        queryResult = new QuerySearchResult(ctx2, shard2Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize);
         queryResult.setShardIndex(1);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});
         mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
@@ -352,8 +437,13 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe
                                          SearchActionListener<FetchSearchResult> listener) {
                 FetchSearchResult fetchResult = new FetchSearchResult();
                 if (request.contextId().getId() == 321) {
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
+                    fetchResult.setSearchShardTarget(shard2Target);
+                    SearchHits hits = new SearchHits(
+                        new SearchHit[] { new SearchHit(84) },
+                        new TotalHits(1, TotalHits.Relation.EQUAL_TO),
+                        2.0F
+                    );
+                    fetchResult.shardResult(hits, fetchProfile(profiled));
                 } else {
                     fail("requestID 123 should not be fetched but was");
                 }
@@ -377,7 +467,34 @@ public void run() {
         assertEquals(84, searchResponse.getHits().getAt(0).docId());
         assertEquals(0, searchResponse.getFailedShards());
         assertEquals(2, searchResponse.getSuccessfulShards());
+        if (profiled) {
+            assertThat(searchResponse.getProfileResults().size(), equalTo(2));
+            assertThat(searchResponse.getProfileResults().get(shard1Target.toString()).getFetchPhase(), nullValue());
+            assertThat(
+                searchResponse.getProfileResults().get(shard2Target.toString()).getFetchPhase().getTime(),
+                equalTo(FETCH_PROFILE_TIME)
+            );
+        }
         assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
         assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx1));
     }
+
+    private void addProfiling(boolean profiled, QuerySearchResult queryResult) {
+        if (profiled) {
+            queryResult.profileResults(new SearchProfileQueryPhaseResult(org.elasticsearch.core.List.of(), null));
+        }
+    }
+
+    private ProfileResult fetchProfile(boolean profiled) {
+        return profiled
+            ? new ProfileResult(
+                "fetch",
+                "fetch",
+                org.elasticsearch.core.Map.of(),
+                org.elasticsearch.core.Map.of(),
+                FETCH_PROFILE_TIME,
+                org.elasticsearch.core.List.of()
+            )
+            : null;
+    }
 }
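Review note: `fetchProfile` above builds the simplest possible `ProfileResult` — a leaf with empty breakdown and debug maps. The type is recursive, so real fetch profiles can nest sub-phase results through the final `children` argument. A hedged illustration using the same six-argument constructor; all keys, names, and timings below are invented for the example:

```java
// Illustration only: breakdown/debug keys and timings are made up.
ProfileResult subPhase = new ProfileResult(
    "SomeFetchSubPhase",                              // type (hypothetical)
    "",                                               // description
    org.elasticsearch.core.Map.of("process", 100L),   // breakdown timings
    org.elasticsearch.core.Map.of(),                  // no debug info
    100L,                                             // time in nanos
    org.elasticsearch.core.List.of()                  // leaf node
);
ProfileResult fetch = new ProfileResult(
    "fetch",
    "",
    org.elasticsearch.core.Map.of("load_stored_fields", 400L),
    org.elasticsearch.core.Map.of("stored_fields", org.elasticsearch.core.List.of("_id", "_source")),
    500L,
    org.elasticsearch.core.List.of(subPhase)          // nested sub-phase result
);
```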
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
index 94ef815c9885c..479ea3a2c1341 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
@@ -52,6 +52,9 @@ import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchContextId;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.suggest.SortBy;
 import org.elasticsearch.search.suggest.Suggest;
@@ -82,10 +85,16 @@ import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonList;
+import static java.util.stream.Collectors.toList;
+import static org.hamcrest.Matchers.anEmptyMap;
+import static org.hamcrest.Matchers.both;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
@@ -138,7 +147,7 @@ public void testSortDocs() {
         }
         int nShards = randomIntBetween(1, 20);
         int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2);
-        AtomicArray<SearchPhaseResult> results = generateQueryResults(nShards, suggestions, queryResultSize, false);
+        AtomicArray<SearchPhaseResult> results = generateQueryResults(nShards, suggestions, queryResultSize, false, false);
         Optional<SearchPhaseResult> first = results.asList().stream().findFirst();
         int from = 0, size = 0;
         if (first.isPresent()) {
@@ -210,7 +219,7 @@ private AtomicArray<SearchPhaseResult> generateSeededQueryResults(long seed, int
                                                                       List<CompletionSuggestion> suggestions, int searchHitsSize, boolean useConstantScore) throws Exception {
         return RandomizedContext.current().runWithPrivateRandomness(seed,
-            () -> generateQueryResults(nShards, suggestions, searchHitsSize, useConstantScore));
+            () -> generateQueryResults(nShards, suggestions, searchHitsSize, useConstantScore, false));
     }
 
     public void testMerge() {
@@ -223,13 +232,26 @@ public void testMerge() {
         }
         int nShards = randomIntBetween(1, 20);
         int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2);
-        AtomicArray<SearchPhaseResult> queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false);
+        boolean profile = randomBoolean();
         for (int trackTotalHits : new int[] { SearchContext.TRACK_TOTAL_HITS_DISABLED, SearchContext.TRACK_TOTAL_HITS_ACCURATE }) {
-            SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList(),
-                new ArrayList<>(), new ArrayList<>(), new SearchPhaseController.TopDocsStats(trackTotalHits),
-                0, true, InternalAggregationTestCase.emptyReduceContextBuilder(), true);
-            AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults(nShards,
-                reducedQueryPhase.sortedTopDocs.scoreDocs, reducedQueryPhase.suggest);
+            AtomicArray<SearchPhaseResult> queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false, profile);
+            SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(
+                queryResults.asList(),
+                new ArrayList<>(),
+                new ArrayList<>(),
+                new SearchPhaseController.TopDocsStats(trackTotalHits),
+                0,
+                true,
+                InternalAggregationTestCase.emptyReduceContextBuilder(),
+                true
+            );
+            List<SearchShardTarget> shards = queryResults.asList().stream().map(SearchPhaseResult::getSearchShardTarget).collect(toList());
+            AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults(
+                shards,
+                reducedQueryPhase.sortedTopDocs.scoreDocs,
+                reducedQueryPhase.suggest,
+                profile
+            );
             InternalSearchResponse mergedResponse = searchPhaseController.merge(false, reducedQueryPhase,
                 fetchResults.asList(), fetchResults::get);
             if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
@@ -263,6 +285,21 @@ public void testMerge() {
                     }
                 }
             }
+            if (profile) {
+                assertThat(mergedResponse.profile().entrySet(), hasSize(nShards));
+                assertThat( // All shards should have a query profile
+                    mergedResponse.profile().toString(),
+                    mergedResponse.profile().values().stream().filter(r -> r.getQueryProfileResults() != null).count(),
+                    equalTo((long) nShards)
+                );
+                assertThat( // Some or all shards should have a fetch profile
+                    mergedResponse.profile().toString(),
+                    mergedResponse.profile().values().stream().filter(r -> r.getFetchPhase() != null).count(),
+                    both(greaterThan(0L)).and(lessThanOrEqualTo((long) nShards))
+                );
+            } else {
+                assertThat(mergedResponse.profile(), is(anEmptyMap()));
+            }
         }
     }
 
@@ -273,8 +310,13 @@ public void testMerge() {
      * {@link SearchPhaseController#reducedQueryPhase} does,
      * meaning that the returned query results can be fed directly to {@link SearchPhaseController#sortDocs}
      */
-    private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards, List<CompletionSuggestion> suggestions,
-                                                                       int searchHitsSize, boolean useConstantScore) {
+    private static AtomicArray<SearchPhaseResult> generateQueryResults(
+        int nShards,
+        List<CompletionSuggestion> suggestions,
+        int searchHitsSize,
+        boolean useConstantScore,
+        boolean profile
+    ) {
         AtomicArray<SearchPhaseResult> queryResults = new AtomicArray<>(nShards);
         for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
             String clusterAlias = randomBoolean() ? null : "remote";
@@ -318,6 +360,14 @@ private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards,
             querySearchResult.size(searchHitsSize);
             querySearchResult.suggest(new Suggest(new ArrayList<>(shardSuggestion)));
             querySearchResult.setShardIndex(shardIndex);
+            if (profile) {
+                querySearchResult.profileResults(
+                    new SearchProfileQueryPhaseResult(
+                        org.elasticsearch.core.List.of(),
+                        new AggregationProfileShardResult(org.elasticsearch.core.List.of())
+                    )
+                );
+            }
             queryResults.set(shardIndex, querySearchResult);
         }
         return queryResults;
@@ -346,12 +396,16 @@ private static List<Suggest.Suggestion<?>> reducedSuggest(AtomicArray<SearchPha
-    private static AtomicArray<SearchPhaseResult> generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) {
-        AtomicArray<SearchPhaseResult> fetchResults = new AtomicArray<>(nShards);
-        for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
+    private static AtomicArray<SearchPhaseResult> generateFetchResults(
+        List<SearchShardTarget> shards,
+        ScoreDoc[] mergedSearchDocs,
+        Suggest mergedSuggest,
+        boolean profile
+    ) {
+        AtomicArray<SearchPhaseResult> fetchResults = new AtomicArray<>(shards.size());
+        for (int shardIndex = 0; shardIndex < shards.size(); shardIndex++) {
             float maxScore = -1F;
-            String clusterAlias = randomBoolean() ? null : "remote";
-            SearchShardTarget shardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), clusterAlias, OriginalIndices.NONE);
+            SearchShardTarget shardTarget = shards.get(shardIndex);
             FetchSearchResult fetchSearchResult = new FetchSearchResult(new ShardSearchContextId("", shardIndex), shardTarget);
             List<SearchHit> searchHits = new ArrayList<>();
             for (ScoreDoc scoreDoc : mergedSearchDocs) {
@@ -376,7 +430,17 @@ private static AtomicArray<SearchPhaseResult> generateFetchResults(int nShards,
                 }
             }
             SearchHit[] hits = searchHits.toArray(new SearchHit[0]);
-            fetchSearchResult.hits(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore));
+            ProfileResult profileResult = profile && searchHits.size() > 0
+                ? new ProfileResult(
+                    "fetch",
+                    "fetch",
+                    org.elasticsearch.core.Map.of(),
+                    org.elasticsearch.core.Map.of(),
+                    randomNonNegativeLong(),
+                    org.elasticsearch.core.List.of()
+                )
+                : null;
+            fetchSearchResult.shardResult(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore), profileResult);
             fetchResults.set(shardIndex, fetchSearchResult);
         }
         return fetchResults;
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
index 592c356a32a48..b6d9e40878f77 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
@@ -13,8 +13,8 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider;
-import org.elasticsearch.core.Tuple;
 import org.elasticsearch.common.text.Text;
+import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.DocValueFormat;
@@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.metrics.Max;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileResults;
-import org.elasticsearch.search.profile.SearchProfileShardResultsTests;
+import org.elasticsearch.search.profile.SearchProfileResultsTests;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
 import org.elasticsearch.test.ESTestCase;
@@ -212,9 +212,9 @@ public void testMergeProfileResults() throws InterruptedException {
         SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0);
         SearchResponseMerger merger = new SearchResponseMerger(0, 0, SearchContext.TRACK_TOTAL_HITS_ACCURATE,
             searchTimeProvider, emptyReduceContextBuilder());
-        Map<String, SearchProfileQueryPhaseResult> expectedProfile = new HashMap<>();
+        Map<String, SearchProfileShardResult> expectedProfile = new HashMap<>();
         for (int i = 0; i < numResponses; i++) {
-            SearchProfileResults profile = SearchProfileShardResultsTests.createTestItem();
+            SearchProfileResults profile = SearchProfileResultsTests.createTestItem();
             expectedProfile.putAll(profile.getShardResults());
             SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1);
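Review note: `testMergeProfileResults` relies on shard keys being unique across the merged responses, so the expected map can be built by plain union. A hedged sketch of that accumulation pattern (the `profilesPerResponse` collection is assumed; keys are whatever `SearchShardTarget#toString()` produced on each shard):

```java
// Sketch of the expectation built in the test: disjoint key sets union cleanly.
Map<String, SearchProfileShardResult> expected = new HashMap<>();
for (SearchProfileResults profiles : profilesPerResponse) { // assumed iterable
    expected.putAll(profiles.getShardResults()); // no collisions: keys are per-shard unique
}
```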
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
index 23cd25ad85f35..011e87bffdeee 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
@@ -31,7 +31,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.profile.SearchProfileResults;
-import org.elasticsearch.search.profile.SearchProfileShardResultsTests;
+import org.elasticsearch.search.profile.SearchProfileResultsTests;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.SuggestTests;
 import org.elasticsearch.test.ESTestCase;
@@ -105,9 +105,16 @@ private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... sha
             SearchHits hits = SearchHitsTests.createTestItem(true, true);
             InternalAggregations aggregations = aggregationsTests.createTestInstance();
             Suggest suggest = SuggestTests.createTestItem();
-            SearchProfileResults profileShardResults = SearchProfileShardResultsTests.createTestItem();
-            internalSearchResponse = new InternalSearchResponse(hits, aggregations, suggest, profileShardResults,
-                timedOut, terminatedEarly, numReducePhases);
+            SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem();
+            internalSearchResponse = new InternalSearchResponse(
+                hits,
+                aggregations,
+                suggest,
+                profileResults,
+                timedOut,
+                terminatedEarly,
+                numReducePhases
+            );
         } else {
             internalSearchResponse = InternalSearchResponse.empty();
         }
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchProfilerTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchProfilerTests.java
new file mode 100644
index 0000000000000..242ecca404f52
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchProfilerTests.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.fetch;
+
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static io.github.nik9000.mapmatcher.MapMatcher.assertMap;
+import static io.github.nik9000.mapmatcher.MapMatcher.matchesMap;
+import static org.hamcrest.Matchers.equalTo;
+
+public class FetchProfilerTests extends ESTestCase {
+    public void testTime() {
+        long startTime = randomLongBetween(0, Long.MAX_VALUE / 2);
+        FetchProfiler profiler = new FetchProfiler(startTime);
+        long elapsed = randomLongBetween(0, Long.MAX_VALUE / 2);
+        ProfileResult result = profiler.finish(startTime + elapsed);
+        assertThat(result.getTime(), equalTo(elapsed));
+    }
+
+    public void testStoredFieldsIsOrdered() throws IOException {
+        FetchProfiler profiler = new FetchProfiler();
+        profiler.visitor(new CustomFieldsVisitor(org.elasticsearch.core.Set.of(), true));
+        ProfileResult result = profiler.finish();
+        assertMap(result.getDebugInfo(), matchesMap().entry("stored_fields", org.elasticsearch.core.List.of("_id", "_routing", "_source")));
+        // Make sure that serialization preserves the order
+        ProfileResult copy = copyWriteable(result, new NamedWriteableRegistry(org.elasticsearch.core.List.of()), ProfileResult::new);
+        assertMap(copy.getDebugInfo(), matchesMap().entry("stored_fields", org.elasticsearch.core.List.of("_id", "_routing", "_source")));
+    }
+}
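Review note: `FetchProfilerTests` fixes two behaviors the fetch phase depends on — elapsed time is simply the delta between the timestamp given to the constructor and the one given to `finish`, and the `stored_fields` debug entry keeps its order through serialization. The calling pattern implied by `testTime`, sketched with an assumed `System.nanoTime()` clock; the surrounding fetch-phase plumbing is elided:

```java
// Hedged usage sketch based on the constructor/finish overloads in testTime.
FetchProfiler profiler = new FetchProfiler(System.nanoTime());
// ... run fetch sub-phases, letting the profiler observe readers and visitors ...
ProfileResult result = profiler.finish(System.nanoTime());
long elapsedNanos = result.getTime(); // difference of the two timestamps
```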
diff --git a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java
index 19edfb4a313e2..2cb3fed738ba2 100644
--- a/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java
+++ b/server/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java
@@ -9,13 +9,12 @@ package org.elasticsearch.search.profile;
 
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -25,12 +24,9 @@ import java.util.Map;
 import java.util.function.Predicate;
 
-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
-
-public class ProfileResultTests extends ESTestCase {
+public class ProfileResultTests extends AbstractSerializingTestCase<ProfileResult> {
+    public static final Predicate<String> RANDOM_FIELDS_EXCLUDE_FILTER = s -> s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())
+        || s.endsWith(ProfileResult.DEBUG.getPreferredName());
 
     public static ProfileResult createTestItem(int depth) {
         String type = randomAlphaOfLengthBetween(5, 10);
@@ -58,41 +54,24 @@ public static ProfileResult createTestItem(int depth) {
         return new ProfileResult(type, description, breakdown, debug, randomNonNegativeLong(), children);
     }
 
-    public void testFromXContent() throws IOException {
-        doFromXContentTestWithRandomFields(false);
+    @Override
+    protected ProfileResult createTestInstance() {
+        return createTestItem(2);
     }
 
-    /**
-     * This test adds random fields and objects to the xContent rendered out to ensure we can parse it
-     * back to be forward compatible with additions to the xContent
-     */
-    public void testFromXContentWithRandomFields() throws IOException {
-        doFromXContentTestWithRandomFields(true);
+    @Override
+    protected Reader<ProfileResult> instanceReader() {
+        return ProfileResult::new;
     }
 
-    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
-        ProfileResult profileResult = createTestItem(2);
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-        BytesReference mutated;
-        if (addRandomFields) {
-            // "breakdown" and "debug" just consists of key/value pairs, we shouldn't add anything random there
-            Predicate<String> excludeFilter = (s) ->
-                s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()) || s.endsWith(ProfileResult.DEBUG.getPreferredName());
-            mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
-        } else {
-            mutated = originalBytes;
-        }
-        ProfileResult parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            parsed = ProfileResult.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
-            assertNull(parser.nextToken());
-        }
-        assertEquals(profileResult.getTime(), parsed.getTime());
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
+    @Override
+    protected ProfileResult doParseInstance(XContentParser parser) throws IOException {
+        return ProfileResult.fromXContent(parser);
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return RANDOM_FIELDS_EXCLUDE_FILTER;
     }
 
     public void testToXContent() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResultTests.java
new file mode 100644
index 0000000000000..85b3ee9c7ceb5
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileQueryPhaseResultTests.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResultTests;
+import org.elasticsearch.search.profile.query.QueryProfileShardResult;
+import org.elasticsearch.search.profile.query.QueryProfileShardResultTests;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SearchProfileQueryPhaseResultTests extends AbstractWireSerializingTestCase<SearchProfileQueryPhaseResult> {
+    static SearchProfileQueryPhaseResult createTestItem() {
+        List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
+        int queryItems = rarely() ? 0 : randomIntBetween(1, 2);
+        for (int q = 0; q < queryItems; q++) {
+            queryProfileResults.add(QueryProfileShardResultTests.createTestItem());
+        }
+        AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1);
+        return new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult);
+    }
+
+    @Override
+    protected SearchProfileQueryPhaseResult createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected Reader<SearchProfileQueryPhaseResult> instanceReader() {
+        return SearchProfileQueryPhaseResult::new;
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
new file mode 100644
index 0000000000000..c174fd0cec7f4
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsBuilderTests.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.matchesPattern;
+
+public class SearchProfileResultsBuilderTests extends ESTestCase {
+    public void testFetchWithoutQuery() {
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase = randomSearchPhaseResults();
+        FetchSearchResult fetchPhase = fetchResult(
+            randomValueOtherThanMany(searchPhase::containsKey, SearchProfileResultsBuilderTests::randomTarget),
+            null
+        );
+        Exception e = expectThrows(
+            IllegalStateException.class,
+            () -> builder(searchPhase).build(org.elasticsearch.core.List.of(fetchPhase))
+        );
+        assertThat(
+            e.getMessage(),
+            matchesPattern(
+                "Profile returned fetch phase information for .+ but didn't return query phase information\\. Query phase keys were .+"
+            )
+        );
+    }
+
+    public void testQueryWithoutAnyFetch() {
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase = randomSearchPhaseResults();
+        FetchSearchResult fetchPhase = fetchResult(searchPhase.keySet().iterator().next(), null);
+        SearchProfileResults result = builder(searchPhase).build(org.elasticsearch.core.List.of(fetchPhase));
+        assertThat(
+            result.getShardResults().values().stream().filter(r -> r.getQueryPhase() != null).count(),
+            equalTo((long) searchPhase.size())
+        );
+        assertThat(result.getShardResults().values().stream().filter(r -> r.getFetchPhase() != null).count(), equalTo(0L));
+    }
+
+    public void testQueryAndFetch() {
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase = randomSearchPhaseResults();
+        List<FetchSearchResult> fetchPhase = searchPhase.entrySet()
+            .stream()
+            .map(
+                e -> fetchResult(
+                    e.getKey(),
+                    new ProfileResult(
+                        "fetch",
+                        "",
+                        org.elasticsearch.core.Map.of(),
+                        org.elasticsearch.core.Map.of(),
+                        1,
+                        org.elasticsearch.core.List.of()
+                    )
+                )
+            )
+            .collect(toList());
+        SearchProfileResults result = builder(searchPhase).build(fetchPhase);
+        assertThat(
+            result.getShardResults().values().stream().filter(r -> r.getQueryPhase() != null).count(),
+            equalTo((long) searchPhase.size())
+        );
+        assertThat(
+            result.getShardResults().values().stream().filter(r -> r.getFetchPhase() != null).count(),
+            equalTo((long) searchPhase.size())
+        );
+    }
+
+    private static Map<SearchShardTarget, SearchProfileQueryPhaseResult> randomSearchPhaseResults() {
+        int size = rarely() ? 0 : randomIntBetween(1, 2);
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> results = new HashMap<>(size);
+        while (results.size() < size) {
+            results.put(randomTarget(), SearchProfileQueryPhaseResultTests.createTestItem());
+        }
+        return results;
+    }
+
+    private static SearchProfileResultsBuilder builder(Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase) {
+        return new SearchProfileResultsBuilder(
+            searchPhase.entrySet().stream().collect(toMap(e -> e.getKey().toString(), Map.Entry::getValue))
+        );
+    }
+
+    private static FetchSearchResult fetchResult(SearchShardTarget target, ProfileResult profileResult) {
+        FetchSearchResult fetchResult = new FetchSearchResult();
+        fetchResult.shardResult(SearchHits.empty(), profileResult);
+        fetchResult.setSearchShardTarget(target);
+        return fetchResult;
+    }
+
+    private static SearchShardTarget randomTarget() {
+        return new SearchShardTarget(randomAlphaOfLength(5), new ShardId(randomAlphaOfLength(5), "uuid", randomInt(6)), null, null);
+    }
+}
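Review note: these tests define the builder's contract without showing its implementation — query-phase results arrive keyed by `SearchShardTarget#toString()`, fetch profiles are matched back by the target recorded on each `FetchSearchResult`, and a fetch profile without a matching query profile is a hard error. A minimal hedged sketch of a `build()` consistent with that contract; the `queryPhaseResults` field and the `profileResult()` accessor are assumptions, not upstream code:

```java
// Hedged sketch of the contract exercised above, not the upstream source.
public SearchProfileResults build(List<FetchSearchResult> fetchResults) {
    Map<String, SearchProfileShardResult> shards = new HashMap<>(queryPhaseResults.size());
    for (FetchSearchResult fetch : fetchResults) {
        String key = fetch.getSearchShardTarget().toString();
        SearchProfileQueryPhaseResult queryPhase = queryPhaseResults.get(key);
        if (queryPhase == null) {
            throw new IllegalStateException(
                "Profile returned fetch phase information for " + key
                    + " but didn't return query phase information. Query phase keys were " + queryPhaseResults.keySet()
            );
        }
        shards.put(key, new SearchProfileShardResult(queryPhase, fetch.profileResult())); // accessor assumed
    }
    // Shards that never fetched still surface their query-phase profile.
    for (Map.Entry<String, SearchProfileQueryPhaseResult> e : queryPhaseResults.entrySet()) {
        shards.putIfAbsent(e.getKey(), new SearchProfileShardResult(e.getValue(), null));
    }
    return new SearchProfileResults(shards);
}
```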
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java
new file mode 100644
index 0000000000000..00bcee23d7fbb
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
+
+public class SearchProfileResultsTests extends AbstractSerializingTestCase<SearchProfileResults> {
+    public static SearchProfileResults createTestItem() {
+        int size = rarely() ? 0 : randomIntBetween(1, 2);
+        Map<String, SearchProfileShardResult> shards = new HashMap<>(size);
+        for (int i = 0; i < size; i++) {
+            SearchProfileQueryPhaseResult searchResult = SearchProfileQueryPhaseResultTests.createTestItem();
+            ProfileResult fetchResult = randomBoolean() ? null : ProfileResultTests.createTestItem(2);
+            shards.put(
+                randomAlphaOfLengthBetween(5, 10),
+                new SearchProfileShardResult(searchResult, fetchResult)
+            );
+        }
+        return new SearchProfileResults(shards);
+    }
+
+    @Override
+    protected SearchProfileResults createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected Reader<SearchProfileResults> instanceReader() {
+        return SearchProfileResults::new;
+    }
+
+    @Override
+    protected SearchProfileResults doParseInstance(XContentParser parser) throws IOException {
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD);
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        SearchProfileResults result = SearchProfileResults.fromXContent(parser);
+        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
+        assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+        return result;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return ProfileResultTests.RANDOM_FIELDS_EXCLUDE_FILTER;
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultTests.java
new file mode 100644
index 0000000000000..99228843c817a
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultTests.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+public class SearchProfileShardResultTests extends AbstractWireSerializingTestCase<SearchProfileShardResult> {
+    static SearchProfileShardResult createTestItem() {
+        SearchProfileQueryPhaseResult searchResult = SearchProfileQueryPhaseResultTests.createTestItem();
+        ProfileResult fetchResult = randomBoolean() ? null : ProfileResultTests.createTestItem(2);
+        return new SearchProfileShardResult(searchResult, fetchResult);
+    }
+
+    @Override
+    protected SearchProfileShardResult createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected Reader<SearchProfileShardResult> instanceReader() {
+        return SearchProfileShardResult::new;
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java
deleted file mode 100644
index 72ad64111e6b5..0000000000000
--- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.search.profile;
-
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
-import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResultTests;
-import org.elasticsearch.search.profile.query.QueryProfileShardResult;
-import org.elasticsearch.search.profile.query.QueryProfileShardResultTests;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Predicate;
-
-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
-import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
-
-public class SearchProfileShardResultsTests extends ESTestCase {
-
-    public static SearchProfileResults createTestItem() {
-        int size = rarely() ? 0 : randomIntBetween(1, 2);
-        Map<String, SearchProfileQueryPhaseResult> searchProfileResults = new HashMap<>(size);
-        for (int i = 0; i < size; i++) {
-            List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
-            int queryItems = rarely() ? 0 : randomIntBetween(1, 2);
-            for (int q = 0; q < queryItems; q++) {
-                queryProfileResults.add(QueryProfileShardResultTests.createTestItem());
-            }
-            AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1);
-            searchProfileResults.put(
-                randomAlphaOfLengthBetween(5, 10),
-                new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult)
-            );
-        }
-        return new SearchProfileResults(searchProfileResults);
-    }
-
-    public void testFromXContent() throws IOException {
-        doFromXContentTestWithRandomFields(false);
-    }
-
-    /**
-     * This test adds random fields and objects to the xContent rendered out to ensure we can parse it
-     * back to be forward compatible with additions to the xContent
-     */
-    public void testFromXContentWithRandomFields() throws IOException {
-        doFromXContentTestWithRandomFields(true);
-    }
-
-    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
-        SearchProfileResults shardResult = createTestItem();
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(shardResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-        BytesReference mutated;
-        if (addRandomFields) {
-            // The ProfileResults "breakdown" section just consists of key/value pairs, we shouldn't add anything random there
-            // also we don't want to insert into the root object here, its just the PROFILE_FIELD itself
-            Predicate<String> excludeFilter = (s) -> s.isEmpty()
-                || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())
-                || s.endsWith(ProfileResult.DEBUG.getPreferredName());
-            mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
-        } else {
-            mutated = originalBytes;
-        }
-        SearchProfileResults parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD);
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            parsed = SearchProfileResults.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
-            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
-            assertNull(parser.nextToken());
-        }
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
-
-    }
-
-}
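Review note: the deletion above retires the last hand-rolled xContent round-trip in this package; its successors extend the shared base classes instead. The whole migration pattern reduces to three overrides. A skeleton under the assumption of a `Writeable` + `ToXContent` type called `SomeResult` (a placeholder, not a real class):

```java
// Skeleton of the AbstractSerializingTestCase migration used throughout this PR.
public class SomeResultTests extends AbstractSerializingTestCase<SomeResult> {
    @Override
    protected SomeResult createTestInstance() {
        return randomSomeResult(); // any randomized-but-valid instance (placeholder)
    }

    @Override
    protected Writeable.Reader<SomeResult> instanceReader() {
        return SomeResult::new; // wire round trip via the StreamInput constructor
    }

    @Override
    protected SomeResult doParseInstance(XContentParser parser) throws IOException {
        return SomeResult.fromXContent(parser); // xContent round trip
    }
}
```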
diff --git a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java
index 30e572acf1016..bf2b5dc633916 100644
--- a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java
+++ b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfileShardResultTests.java
@@ -9,13 +9,13 @@ package org.elasticsearch.search.profile.aggregation;
 
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParserUtils;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.profile.ProfileResultTests;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -23,11 +23,11 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
-public class AggregationProfileShardResultTests extends ESTestCase {
+public class AggregationProfileShardResultTests extends AbstractSerializingTestCase<AggregationProfileShardResult> {
 
     public static AggregationProfileShardResult createTestItem(int depth) {
         int size = randomIntBetween(0, 5);
@@ -38,22 +38,30 @@ public static AggregationProfileShardResult createTestItem(int depth) {
         return new AggregationProfileShardResult(aggProfileResults);
     }
 
-    public void testFromXContent() throws IOException {
-        AggregationProfileShardResult profileResult = createTestItem(2);
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
+    @Override
+    protected AggregationProfileShardResult createTestInstance() {
+        return createTestItem(2);
+    }
 
-        AggregationProfileShardResult parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
-            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            XContentParserUtils.ensureFieldName(parser, parser.nextToken(), AggregationProfileShardResult.AGGREGATIONS);
-            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser);
-            parsed = AggregationProfileShardResult.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
parser.nextToken()); - assertNull(parser.nextToken()); - } - assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); + @Override + protected AggregationProfileShardResult doParseInstance(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + XContentParserUtils.ensureFieldName(parser, parser.nextToken(), AggregationProfileShardResult.AGGREGATIONS); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); + AggregationProfileShardResult result = AggregationProfileShardResult.fromXContent(parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_ARRAY, parser.currentToken(), parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + return result; + } + + @Override + protected Reader instanceReader() { + return AggregationProfileShardResult::new; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return ProfileResultTests.RANDOM_FIELDS_EXCLUDE_FILTER; } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java index c1631c8578a23..f782c9c9a683a 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java @@ -9,26 +9,21 @@ package org.elasticsearch.search.profile.query; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; - -public class CollectorResultTests extends ESTestCase { +public class CollectorResultTests extends AbstractSerializingTestCase { public static CollectorResult createTestItem(int depth) { String name = randomAlphaOfLengthBetween(5, 10); String reason = randomAlphaOfLengthBetween(5, 10); @@ -47,31 +42,22 @@ public static CollectorResult createTestItem(int depth) { return new CollectorResult(name, reason, time, children); } - public void testFromXContent() throws IOException { - doFromXContentTestWithRandomFields(false); + @Override + protected CollectorResult createTestInstance() { + return createTestItem(1); } - public void testFromXContentWithRandomFields() throws IOException { - doFromXContentTestWithRandomFields(true); + @Override + protected CollectorResult doParseInstance(XContentParser parser) throws IOException { + 
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + CollectorResult result = CollectorResult.fromXContent(parser); + ensureExpectedToken(null, parser.nextToken(), parser); + return result; } - private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { - CollectorResult collectorResult = createTestItem(1); - XContentType xContentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(collectorResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); - BytesReference mutated; - if (addRandomFields) { - mutated = insertRandomFields(xContentType, originalBytes, null, random()); - } else { - mutated = originalBytes; - } - try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - CollectorResult parsed = CollectorResult.fromXContent(parser); - assertNull(parser.nextToken()); - assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); - } + @Override + protected Reader instanceReader() { + return CollectorResult::new; } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java index cf150b105d9bd..f103146ef0242 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java @@ -8,24 +8,20 @@ package org.elasticsearch.search.profile.query; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.ProfileResultTests; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.function.Predicate; -import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; - -public class QueryProfileShardResultTests extends ESTestCase { +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +public class QueryProfileShardResultTests extends AbstractSerializingTestCase { public static QueryProfileShardResult createTestItem() { int size = randomIntBetween(0, 5); List queryProfileResults = new ArrayList<>(size); @@ -40,20 +36,26 @@ public static QueryProfileShardResult createTestItem() { return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector); } - public void testFromXContent() throws IOException { - QueryProfileShardResult profileResult = createTestItem(); - XContentType xContentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); - - 
diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java
index cf150b105d9bd..f103146ef0242 100644
--- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java
+++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java
@@ -8,24 +8,20 @@
 package org.elasticsearch.search.profile.query;

-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentParserUtils;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.profile.ProfileResultTests;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.function.Predicate;

-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
-
-public class QueryProfileShardResultTests extends ESTestCase {
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+
+public class QueryProfileShardResultTests extends AbstractSerializingTestCase<QueryProfileShardResult> {

     public static QueryProfileShardResult createTestItem() {
         int size = randomIntBetween(0, 5);
         List<ProfileResult> queryProfileResults = new ArrayList<>(size);
@@ -40,20 +36,26 @@ public static QueryProfileShardResult createTestItem() {
         return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector);
     }

-    public void testFromXContent() throws IOException {
-        QueryProfileShardResult profileResult = createTestItem();
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-
-        QueryProfileShardResult parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
-            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            parsed = QueryProfileShardResult.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
-            assertNull(parser.nextToken());
-        }
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
+    @Override
+    protected QueryProfileShardResult createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected QueryProfileShardResult doParseInstance(XContentParser parser) throws IOException {
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        QueryProfileShardResult result = QueryProfileShardResult.fromXContent(parser);
+        ensureExpectedToken(null, parser.nextToken(), parser);
+        return result;
     }

+    @Override
+    protected Reader<QueryProfileShardResult> instanceReader() {
+        return QueryProfileShardResult::new;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return ProfileResultTests.RANDOM_FIELDS_EXCLUDE_FILTER;
+    }
 }
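The getRandomFieldsExcludeFilter override plugs into the inherited random-fields test, which injects unknown fields into the serialized form before reparsing to prove the parser is lenient. Sections that are parsed as free-form maps must be excluded, since an injected key there becomes real data and breaks the equality check. A sketch of what such a predicate looks like; the concrete paths below are illustrative assumptions, and the shared filter actually used here lives in ProfileResultTests:

    // Illustrative only: skip random-field injection under map-valued sections,
    // where the parser would treat any inserted key as genuine data.
    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        return path -> path.endsWith("breakdown") || path.endsWith("debug");
    }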
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java
index 37ae8d050c513..b129fb6517dd2 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java
@@ -16,6 +16,8 @@
 import java.io.IOException;
 import java.util.Collections;

+import static org.hamcrest.Matchers.equalTo;
+
 /**
  * Standard test case for testing wire serialization. If the class being tested
  * extends {@link Writeable} then prefer extending {@link AbstractWireSerializingTestCase}.
@@ -83,8 +85,8 @@ protected final void assertSerialization(T testInstance, Version version) throws
      */
     protected void assertEqualInstances(T expectedInstance, T newInstance) {
         assertNotSame(newInstance, expectedInstance);
-        assertEquals(expectedInstance, newInstance);
-        assertEquals(expectedInstance.hashCode(), newInstance.hashCode());
+        assertThat(newInstance, equalTo(expectedInstance));
+        assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode()));
     }

     protected final T copyInstance(T instance) throws IOException {
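The assertEqualInstances change above is behavior-preserving: assertThat(actual, equalTo(expected)) performs the same equality check as assertEquals(expected, actual), but Hamcrest's failure output describes both the expectation and the actual value, and putting the actual value first removes the perennial swapped-arguments mistake. Side by side:

    // JUnit style: expected first, an easy pair of arguments to transpose
    assertEquals(expectedInstance, newInstance);

    // Hamcrest style: actual first, matcher second, self-describing on failure
    assertThat(newInstance, equalTo(expectedInstance));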
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
index 96dd795513232..7fc4883ed6e89 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
@@ -14,6 +14,8 @@
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.get.MultiGetResponse;
+import org.elasticsearch.action.search.ClosePointInTimeAction;
+import org.elasticsearch.action.search.ClosePointInTimeRequest;
 import org.elasticsearch.action.search.MultiSearchResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -28,9 +30,9 @@
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.query.FuzzyQueryBuilder;
 import org.elasticsearch.index.query.InnerHitBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -47,7 +49,8 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.builder.PointInTimeBuilder;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.profile.query.QueryProfileShardResult;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortMode;
@@ -63,8 +66,6 @@
 import org.elasticsearch.test.SecurityIntegTestCase;
 import org.elasticsearch.test.SecuritySettingsSourceField;
 import org.elasticsearch.xpack.core.XPackSettings;
-import org.elasticsearch.action.search.ClosePointInTimeAction;
-import org.elasticsearch.action.search.ClosePointInTimeRequest;
 import org.elasticsearch.xpack.security.LocalStateSecurity;
 import org.elasticsearch.xpack.spatial.SpatialPlugin;
 import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder;
@@ -77,6 +78,7 @@
 import java.util.List;
 import java.util.Map;

+import static java.util.stream.Collectors.toList;
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@@ -1414,13 +1416,15 @@ public void testProfile() throws Exception {
         assertNoFailures(response);

         assertThat(response.getProfileResults().size(), equalTo(1));
-        SearchProfileQueryPhaseResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]);
+        SearchProfileShardResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]);
         assertThat(shardResult.getQueryProfileResults().size(), equalTo(1));
         QueryProfileShardResult queryProfileShardResult = shardResult.getQueryProfileResults().get(0);
         assertThat(queryProfileShardResult.getQueryResults().size(), equalTo(1));
         logger.info("queryProfileShardResult=" + Strings.toString(queryProfileShardResult));
-//        ProfileResult profileResult = queryProfileShardResult.getQueryResults().get(0);
-//        assertThat(profileResult.getLuceneDescription(), equalTo("(other_field:value)^0.8"));
+        assertThat(
+            queryProfileShardResult.getQueryResults().stream().map(ProfileResult::getLuceneDescription).sorted().collect(toList()),
+            equalTo(org.elasticsearch.core.List.of("(other_field:value)^0.8"))
+        );

         final String[] indices = randomFrom(org.elasticsearch.core.List.of(