diff --git a/.gitattributes b/.gitattributes index 04881c92ede00..a0f434f16b32b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -11,6 +11,7 @@ x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/*.interp li x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer*.java linguist-generated=true x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser*.java linguist-generated=true x-pack/plugin/esql/src/main/generated/** linguist-generated=true +x-pack/plugin/esql/src/main/generated-src/** linguist-generated=true # ESQL functions docs are autogenerated. More information at `docs/reference/esql/functions/README.md` docs/reference/esql/functions/*/** linguist-generated=true diff --git a/benchmarks/README.md b/benchmarks/README.md index d7b324acfef81..0cf95a2e81b9a 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -126,9 +126,12 @@ exit Grab the async profiler from https://github.com/jvm-profiling-tools/async-profiler and run `prof async` like so: ``` -gradlew -p benchmarks/ run --args 'LongKeyedBucketOrdsBenchmark.multiBucket -prof "async:libPath=/home/nik9000/Downloads/tmp/async-profiler-1.8.3-linux-x64/build/libasyncProfiler.so;dir=/tmp/prof;output=flamegraph"' +gradlew -p benchmarks/ run --args 'LongKeyedBucketOrdsBenchmark.multiBucket -prof "async:libPath=/home/nik9000/Downloads/async-profiler-3.0-29ee888-linux-x64/lib/libasyncProfiler.so;dir=/tmp/prof;output=flamegraph"' ``` +Note: As of January 2025 the latest release of async profiler doesn't work + with our JDK but the nightly is fine. + If you are on Mac, this'll warn you that you downloaded the shared library from the internet. You'll need to go to settings and allow it to run. 
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index d3259b9604717..19d72a1f84f25 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; @@ -96,6 +97,9 @@ public class EvalBenchmark { "add_double", "case_1_eager", "case_1_lazy", + "coalesce_2_noop", + "coalesce_2_eager", + "coalesce_2_lazy", "date_trunc", "equal_to_const", "long_equal_to_long", @@ -142,8 +146,34 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { lhs = new Add(Source.EMPTY, lhs, new Literal(Source.EMPTY, 1L, DataType.LONG)); rhs = new Add(Source.EMPTY, rhs, new Literal(Source.EMPTY, 1L, DataType.LONG)); } - yield EvalMapper.toEvaluator(FOLD_CONTEXT, new Case(Source.EMPTY, condition, List.of(lhs, rhs)), layout(f1, f2)) - .get(driverContext); + EvalOperator.ExpressionEvaluator evaluator = EvalMapper.toEvaluator( + FOLD_CONTEXT, + new Case(Source.EMPTY, condition, List.of(lhs, rhs)), + layout(f1, f2) + ).get(driverContext); + String desc = operation.endsWith("lazy") ? 
"CaseLazyEvaluator" : "CaseEagerEvaluator"; + if (evaluator.toString().contains(desc) == false) { + throw new IllegalArgumentException("Evaluator was [" + evaluator + "] but expected one containing [" + desc + "]"); + } + yield evaluator; + } + case "coalesce_2_noop", "coalesce_2_eager", "coalesce_2_lazy" -> { + FieldAttribute f1 = longField(); + FieldAttribute f2 = longField(); + Expression lhs = f1; + if (operation.endsWith("lazy")) { + lhs = new Add(Source.EMPTY, lhs, new Literal(Source.EMPTY, 1L, DataType.LONG)); + } + EvalOperator.ExpressionEvaluator evaluator = EvalMapper.toEvaluator( + FOLD_CONTEXT, + new Coalesce(Source.EMPTY, lhs, List.of(f2)), + layout(f1, f2) + ).get(driverContext); + String desc = operation.endsWith("lazy") ? "CoalesceLazyEvaluator" : "CoalesceEagerEvaluator"; + if (evaluator.toString().contains(desc) == false) { + throw new IllegalArgumentException("Evaluator was [" + evaluator + "] but expected one containing [" + desc + "]"); + } + yield evaluator; } case "date_trunc" -> { FieldAttribute timestamp = new FieldAttribute( @@ -260,6 +290,38 @@ private static void checkExpected(String operation, Page actual) { } } } + case "coalesce_2_noop" -> { + LongVector f1 = actual.getBlock(0).asVector(); + LongVector result = actual.getBlock(2).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + long expected = f1.getLong(i); + if (result.getLong(i) != expected) { + throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + result.getLong(i) + "]"); + } + } + } + case "coalesce_2_eager" -> { + LongBlock f1 = actual.getBlock(0); + LongVector f2 = actual.getBlock(1).asVector(); + LongVector result = actual.getBlock(2).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + long expected = i % 5 == 0 ? 
f2.getLong(i) : f1.getLong(f1.getFirstValueIndex(i)); + if (result.getLong(i) != expected) { + throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + result.getLong(i) + "]"); + } + } + } + case "coalesce_2_lazy" -> { + LongBlock f1 = actual.getBlock(0); + LongVector f2 = actual.getBlock(1).asVector(); + LongVector result = actual.getBlock(2).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + long expected = i % 5 == 0 ? f2.getLong(i) : f1.getLong(f1.getFirstValueIndex(i)) + 1; + if (result.getLong(i) != expected) { + throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + result.getLong(i) + "]"); + } + } + } case "date_trunc" -> { LongVector v = actual.getBlock(1).asVector(); long oneDay = TimeValue.timeValueHours(24).millis(); @@ -304,7 +366,7 @@ private static void checkExpected(String operation, Page actual) { } } } - default -> throw new UnsupportedOperationException(); + default -> throw new UnsupportedOperationException(operation); } } @@ -324,7 +386,7 @@ private static Page page(String operation) { } yield new Page(builder.build()); } - case "case_1_eager", "case_1_lazy" -> { + case "case_1_eager", "case_1_lazy", "coalesce_2_noop" -> { var f1 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); var f2 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { @@ -333,6 +395,19 @@ private static Page page(String operation) { } yield new Page(f1.build(), f2.build()); } + case "coalesce_2_eager", "coalesce_2_lazy" -> { + var f1 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); + var f2 = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); + for (int i = 0; i < BLOCK_LENGTH; i++) { + if (i % 5 == 0) { + f1.appendNull(); + } else { + f1.appendLong(i); + } + f2.appendLong(-i); + } + yield new Page(f1.build(), f2.build()); + } case "long_equal_to_long" -> { var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); var rhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); 
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index 87521795d8824..4085e74d35db6 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -78,7 +78,7 @@ public class ScriptScoreBenchmark { private final PluginsService pluginsService = new PluginsService( Settings.EMPTY, null, - PluginsLoader.createPluginsLoader(Set.of(), PluginsLoader.loadPluginsBundles(Path.of(System.getProperty("plugins.dir")))) + PluginsLoader.createPluginsLoader(Set.of(), PluginsLoader.loadPluginsBundles(Path.of(System.getProperty("plugins.dir"))), Map.of()) ); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 06d76f125efdb..aaa43900badb9 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -18,6 +18,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.stream.Stream; @@ -69,7 +70,7 @@ static List systemJvmOptions(Settings nodeSettings, final Map= 24) { + enableNativeAccessOptions.add("--illegal-native-access=deny"); + } + } } - return Stream.empty(); + return enableNativeAccessOptions.stream(); } /* diff --git a/docs/changelog/118122.yaml b/docs/changelog/118122.yaml new file mode 100644 index 0000000000000..ca27cc94a7cb2 --- /dev/null +++ 
b/docs/changelog/118122.yaml @@ -0,0 +1,5 @@ +pr: 118122 +summary: "ES|QL: Partial result on demand for async queries" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/119072.yaml b/docs/changelog/119072.yaml new file mode 100644 index 0000000000000..5b08a214898e2 --- /dev/null +++ b/docs/changelog/119072.yaml @@ -0,0 +1,12 @@ +pr: 119072 +summary: Turn `_source` meta fieldmapper's mode attribute into a no-op +area: Mapping +type: breaking +issues: + - 118596 +breaking: + title: Turn `_source` meta fieldmapper's mode attribute into a no-op + area: Mapping + details: The `mode` mapping attribute of `_source` metadata field mapper has been turned into a no-op. Instead the `index.mapping.source.mode` index setting should be used to configure source mode. + impact: Configuring the `mode` attribute for the `_source` meta field mapper will have no effect on indices created with Elasticsearch 9.0.0 or later. Note that `_source.mode` configured on indices before upgrading to 9.0.0 or later will remain effective after upgrading. 
+ notable: false diff --git a/docs/changelog/120256.yaml b/docs/changelog/120256.yaml new file mode 100644 index 0000000000000..c4ee5ab1705c5 --- /dev/null +++ b/docs/changelog/120256.yaml @@ -0,0 +1,7 @@ +pr: 120256 +summary: Improve memory aspects of enrich cache +area: Ingest Node +type: enhancement +issues: + - 96050 + - 120021 diff --git a/docs/changelog/120645.yaml b/docs/changelog/120645.yaml new file mode 100644 index 0000000000000..a5ee79de6cb5f --- /dev/null +++ b/docs/changelog/120645.yaml @@ -0,0 +1,6 @@ +pr: 120645 +summary: Esql Support date nanos on date diff function +area: ES|QL +type: enhancement +issues: + - 109999 diff --git a/docs/changelog/120722.yaml b/docs/changelog/120722.yaml new file mode 100644 index 0000000000000..4bdd65b0937e3 --- /dev/null +++ b/docs/changelog/120722.yaml @@ -0,0 +1,5 @@ +pr: 120722 +summary: Migrate stream to core error parsing +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-across-clusters.asciidoc b/docs/reference/esql/esql-across-clusters.asciidoc index 6decc351bc1c8..c12865bad6162 100644 --- a/docs/reference/esql/esql-across-clusters.asciidoc +++ b/docs/reference/esql/esql-across-clusters.asciidoc @@ -210,6 +210,7 @@ Which returns: { "is_running": false, "took": 42, <1> + "is_partial": false, <7> "columns" : [ { "name" : "COUNT(http.response.status_code)", @@ -275,8 +276,9 @@ Which returns: <2> This section of counters shows all possible cluster search states and how many cluster searches are currently in that state. The clusters can have one of the following statuses: *running*, *successful* (searches on all shards were successful), *skipped* (the search -failed on a cluster marked with `skip_unavailable`=`true`) or *failed* (the search -failed on a cluster marked with `skip_unavailable`=`false`). 
+failed on a cluster marked with `skip_unavailable`=`true`), *failed* (the search +failed on a cluster marked with `skip_unavailable`=`false`) or **partial** (the search was +<> before finishing). <3> The `_clusters/details` section shows metadata about the search on each cluster. <4> If you included indices from the local cluster you sent the request to in your {ccs}, it is identified as "(local)". @@ -285,6 +287,8 @@ which clusters have slower response times than others. <6> The shard details for the search on that cluster, including a count of shards that were skipped due to the can-match phase results. Shards are skipped when they cannot have any matching data and therefore are not included in the full ES|QL query. +<7> The `is_partial` field is set to `true` if the search has partial results for any reason, +for example if it was interrupted before finishing using the <>. The cross-cluster metadata can be used to determine whether any data came back from a cluster. @@ -314,6 +318,7 @@ Which returns: { "is_running": false, "took": 55, + "is_partial": false, "columns": [ ... // not shown ], diff --git a/docs/reference/esql/esql-apis.asciidoc b/docs/reference/esql/esql-apis.asciidoc index 157f4e4357e78..633a202c9dc3d 100644 --- a/docs/reference/esql/esql-apis.asciidoc +++ b/docs/reference/esql/esql-apis.asciidoc @@ -17,6 +17,7 @@ overview of {esql} and related tutorials, see <>. 
* <> * <> * <> +* <> include::esql-query-api.asciidoc[] @@ -26,3 +27,5 @@ include::esql-async-query-api.asciidoc[] include::esql-async-query-get-api.asciidoc[] include::esql-async-query-delete-api.asciidoc[] + +include::esql-async-query-stop-api.asciidoc[] diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc index 8cb974cf6773b..c194818eb0cc7 100644 --- a/docs/reference/esql/esql-async-query-api.asciidoc +++ b/docs/reference/esql/esql-async-query-api.asciidoc @@ -170,3 +170,10 @@ API>> to get the current status and available results for the query. (Boolean) If `true`, the query request is still executing. -- + +`is_partial`:: ++ +-- +(Boolean) +If `true`, the query has partial results - for example, as a result of using the <>. +-- diff --git a/docs/reference/esql/esql-async-query-stop-api.asciidoc b/docs/reference/esql/esql-async-query-stop-api.asciidoc new file mode 100644 index 0000000000000..dba5282d224ed --- /dev/null +++ b/docs/reference/esql/esql-async-query-stop-api.asciidoc @@ -0,0 +1,49 @@ +[[esql-async-query-stop-api]] +=== {esql} async query stop API +++++ +{esql} async query stop API +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-esql[ES|QL APIs]. +-- + +The <> async query stop API is used to manually stop an async query. Once the stop command is issued, +the query stops processing new data and returns the results that have been already processed. Note that due to the pipelined +nature of {esql} queries, the stop operation is not immediate and may take time to return results. + +The results are returned in <> as the +<>. +If the query has been finished by the time the stop command is issued, the results are returned immediately. + +If the query processing has not finished by the time the stop command is issued, the response will have the `is_partial` +field set to `true`. 
+ +[source,console] +---- +POST /_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=/stop +---- +// TEST[skip: no access to query ID] + +[[esql-async-query-stop-api-request]] +==== {api-request-title} + +`POST /_query/async//stop` + +[[esql-async-query-stop-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, only the authenticated user that submitted the original query request +can stop the query. + +[[esql-async-query-stop-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Identifier for the query to stop. ++ +A query ID is provided in the <>'s +response for a query that does not complete in the awaited time. diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index c353185e2895c..ccdd3227df9e6 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -193,6 +193,7 @@ Which returns: ---- { "took": 28, + "is_partial": false, "columns": [ {"name": "author", "type": "text"}, {"name": "name", "type": "text"}, diff --git a/docs/reference/esql/functions/kibana/definition/date_diff.json b/docs/reference/esql/functions/kibana/definition/date_diff.json index d32028d455348..2738ec8390226 100644 --- a/docs/reference/esql/functions/kibana/definition/date_diff.json +++ b/docs/reference/esql/functions/kibana/definition/date_diff.json @@ -28,6 +28,78 @@ "variadic" : false, "returnType" : "integer" }, + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + 
"type" : "keyword", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "keyword", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, { "params" : [ { @@ -51,6 +123,78 @@ ], "variadic" : false, "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + "optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "unit", + "type" : "text", + 
"optional" : false, + "description" : "Time difference unit" + }, + { + "name" : "startTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing a start timestamp" + }, + { + "name" : "endTimestamp", + "type" : "date_nanos", + "optional" : false, + "description" : "A string representing an end timestamp" + } + ], + "variadic" : false, + "returnType" : "integer" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/like.json b/docs/reference/esql/functions/kibana/definition/like.json index 2fcb29622efbd..4a26dca276696 100644 --- a/docs/reference/esql/functions/kibana/definition/like.json +++ b/docs/reference/esql/functions/kibana/definition/like.json @@ -1,6 +1,7 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "operator", + "operator" : "LIKE", "name" : "like", "description" : "Use `LIKE` to filter data based on string patterns using wildcards. `LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. The right-hand side of the operator\nrepresents the pattern.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index 1ad2c6dba9f81..eb206cb9ddf4d 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -1,7 +1,6 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", - "type" : "operator", - "operator" : ":", + "type" : "eval", "name" : "match", "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json index 665071aed55c4..a67c6b0e45c4a 100644 --- a/docs/reference/esql/functions/kibana/definition/match_operator.json +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -3,7 +3,7 @@ "type" : "operator", "operator" : ":", "name" : "match_operator", - "description" : "Use `MATCH` to perform a <> on the specified field.\nUsing `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nMatch can be used on fields from the text family like <> and <>,\nas well as other field types like keyword, boolean, dates, and numeric types.\n\nFor a simplified syntax, you can use the <> `:` operator instead of `MATCH`.\n\n`MATCH` returns true if the provided query matches the row.", + "description" : "Use the match operator (`:`) to perform a <> on the specified field.\nUsing `:` is equivalent to using the `match` query in the Elasticsearch Query DSL.\n\nThe match operator is equivalent to the <>.\n\nFor using the function syntax, or adding <>, you can use the\n<>.\n\n`:` returns true if the provided query matches the row.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/not_in.json b/docs/reference/esql/functions/kibana/definition/not_in.json new file mode 
100644 index 0000000000000..3fa25d793b503 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/not_in.json @@ -0,0 +1,263 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "operator" : "NOT IN", + "name" : "not_in", + "description" : "The `NOT IN` operator allows testing whether a field or expression does *not* equal any element in a list of literals, fields or expressions.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "boolean", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "boolean", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "cartesian_point", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "cartesian_shape", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "double", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "geo_point", + "optional" : false, + "description" : "A list of items." 
+ } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "geo_shape", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "integer", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "ip", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "ip", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "keyword", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "text", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "long", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "An expression." 
+ }, + { + "name" : "inlist", + "type" : "keyword", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "text", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "version", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "inlist", + "type" : "version", + "optional" : false, + "description" : "A list of items." + } + ], + "variadic" : true, + "returnType" : "boolean" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/not_like.json b/docs/reference/esql/functions/kibana/definition/not_like.json new file mode 100644 index 0000000000000..bba70d14d7cb7 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/not_like.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "operator" : "NOT LIKE", + "name" : "not_like", + "description" : "Use `NOT LIKE` to filter data based on string patterns using wildcards. `NOT LIKE`\nusually acts on a field placed on the left-hand side of the operator, but it can\nalso act on a constant (literal) expression. The right-hand side of the operator\nrepresents the pattern.\n\nThe following wildcard characters are supported:\n\n* `*` matches zero or more characters.\n* `?` matches one character.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "A literal expression." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "Pattern." 
+ } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "A literal expression." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "Pattern." + } + ], + "variadic" : true, + "returnType" : "boolean" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/not_rlike.json b/docs/reference/esql/functions/kibana/definition/not_rlike.json new file mode 100644 index 0000000000000..09abd5ab567e8 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/not_rlike.json @@ -0,0 +1,47 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "operator" : "NOT RLIKE", + "name" : "not_rlike", + "description" : "Use `NOT RLIKE` to filter data based on string patterns using using\n<>. `NOT RLIKE` usually acts on a field placed on\nthe left-hand side of the operator, but it can also act on a constant (literal)\nexpression. The right-hand side of the operator represents the pattern.", + "signatures" : [ + { + "params" : [ + { + "name" : "str", + "type" : "keyword", + "optional" : false, + "description" : "A literal value." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "A regular expression." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "str", + "type" : "text", + "optional" : false, + "description" : "A literal value." + }, + { + "name" : "pattern", + "type" : "keyword", + "optional" : false, + "description" : "A regular expression." 
+ } + ], + "variadic" : true, + "returnType" : "boolean" + } + ], + "preview" : false, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/rlike.json b/docs/reference/esql/functions/kibana/definition/rlike.json index 47cbd7800c821..88d6c7d6bb9b2 100644 --- a/docs/reference/esql/functions/kibana/definition/rlike.json +++ b/docs/reference/esql/functions/kibana/definition/rlike.json @@ -1,6 +1,7 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", - "type" : "eval", + "type" : "operator", + "operator" : "RLIKE", "name" : "rlike", "description" : "Use `RLIKE` to filter data based on string patterns using using\n<>. `RLIKE` usually acts on a field placed on\nthe left-hand side of the operator, but it can also act on a constant (literal)\nexpression. The right-hand side of the operator represents the pattern.", "signatures" : [ diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md index 98f55aacde0b8..0624329182f3a 100644 --- a/docs/reference/esql/functions/kibana/docs/match_operator.md +++ b/docs/reference/esql/functions/kibana/docs/match_operator.md @@ -3,15 +3,15 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### MATCH_OPERATOR -Use `MATCH` to perform a <> on the specified field. -Using `MATCH` is equivalent to using the `match` query in the Elasticsearch Query DSL. +Use the match operator (`:`) to perform a <> on the specified field. +Using `:` is equivalent to using the `match` query in the Elasticsearch Query DSL. -Match can be used on fields from the text family like <> and <>, -as well as other field types like keyword, boolean, dates, and numeric types. +The match operator is equivalent to the <>. -For a simplified syntax, you can use the <> `:` operator instead of `MATCH`. 
+For using the function syntax, or adding <>, you can use the +<>. -`MATCH` returns true if the provided query matches the row. +`:` returns true if the provided query matches the row. ``` FROM books diff --git a/docs/reference/esql/functions/kibana/docs/not_in.md b/docs/reference/esql/functions/kibana/docs/not_in.md new file mode 100644 index 0000000000000..e9e5a7b384d1c --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/not_in.md @@ -0,0 +1,7 @@ + + +### NOT_IN +The `NOT IN` operator allows testing whether a field or expression does *not* equal any element in a list of literals, fields or expressions. + diff --git a/docs/reference/esql/functions/kibana/docs/not_like.md b/docs/reference/esql/functions/kibana/docs/not_like.md new file mode 100644 index 0000000000000..fd1cf7a68630f --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/not_like.md @@ -0,0 +1,15 @@ + + +### NOT_LIKE +Use `NOT LIKE` to filter data based on string patterns using wildcards. `NOT LIKE` +usually acts on a field placed on the left-hand side of the operator, but it can +also act on a constant (literal) expression. The right-hand side of the operator +represents the pattern. + +The following wildcard characters are supported: + +* `*` matches zero or more characters. +* `?` matches one character. + diff --git a/docs/reference/esql/functions/kibana/docs/not_rlike.md b/docs/reference/esql/functions/kibana/docs/not_rlike.md new file mode 100644 index 0000000000000..dac23438c1612 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/not_rlike.md @@ -0,0 +1,10 @@ + + +### NOT_RLIKE +Use `NOT RLIKE` to filter data based on string patterns using using +<>. `NOT RLIKE` usually acts on a field placed on +the left-hand side of the operator, but it can also act on a constant (literal) +expression. The right-hand side of the operator represents the pattern. 
+ diff --git a/docs/reference/esql/functions/search.asciidoc b/docs/reference/esql/functions/search.asciidoc index ba399ead8adfc..1be4dee58e234 100644 --- a/docs/reference/esql/functions/search.asciidoc +++ b/docs/reference/esql/functions/search.asciidoc @@ -11,10 +11,13 @@ Returns true if the provided query matches the row. The match operator is equivalent to the <>. +For using the function syntax, or adding <>, you can use the +<>. + [.text-center] image::esql/functions/signature/match_operator.svg[Embedded,opts=inline] -include::types/match.asciidoc[] +include::types/match_operator.asciidoc[] [source.merge.styled,esql] ---- diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc index b0a4818f412ac..b557d5a34258e 100644 --- a/docs/reference/esql/functions/types/date_diff.asciidoc +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -6,5 +6,11 @@ |=== unit | startTimestamp | endTimestamp | result keyword | date | date | integer +keyword | date | date_nanos | integer +keyword | date_nanos | date | integer +keyword | date_nanos | date_nanos | integer text | date | date | integer +text | date | date_nanos | integer +text | date_nanos | date | integer +text | date_nanos | date_nanos | integer |=== diff --git a/docs/reference/esql/functions/types/not_in.asciidoc b/docs/reference/esql/functions/types/not_in.asciidoc new file mode 100644 index 0000000000000..6ed2c250ef0ac --- /dev/null +++ b/docs/reference/esql/functions/types/not_in.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | inlist | result +boolean | boolean | boolean +cartesian_point | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +double | double | boolean +geo_point | geo_point | boolean +geo_shape | geo_shape | boolean +integer | integer | boolean +ip | ip | boolean +keyword | keyword | boolean +keyword | text | boolean +long | long | boolean +text | keyword | boolean +text | text | boolean +version | version | boolean +|=== diff --git a/docs/reference/esql/functions/types/not_like.asciidoc b/docs/reference/esql/functions/types/not_like.asciidoc new file mode 100644 index 0000000000000..fffa6dc0b8371 --- /dev/null +++ b/docs/reference/esql/functions/types/not_like.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | pattern | result +keyword | keyword | boolean +text | keyword | boolean +|=== diff --git a/docs/reference/esql/functions/types/not_rlike.asciidoc b/docs/reference/esql/functions/types/not_rlike.asciidoc new file mode 100644 index 0000000000000..fffa6dc0b8371 --- /dev/null +++ b/docs/reference/esql/functions/types/not_rlike.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +str | pattern | result +keyword | keyword | boolean +text | keyword | boolean +|=== diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 562ea2a2e6b4a..00d9df04a0bc4 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -27,6 +27,7 @@ Multivalued fields come back as a JSON array: ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"} @@ -78,6 +79,7 @@ And {esql} sees that removal: ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "keyword"} @@ -122,6 +124,7 @@ And {esql} also sees that: ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"} @@ -165,6 +168,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "keyword"} @@ -198,6 +202,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, ], @@ -241,6 +246,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"}, @@ -278,6 +284,7 @@ POST /_query ---- { "took": 28, + "is_partial": false, "columns": [ { "name": "a", "type": "long"}, { "name": "b", "type": "long"}, diff --git a/docs/reference/mapping/fields/synthetic-source.asciidoc b/docs/reference/mapping/fields/synthetic-source.asciidoc index ddbefb73f4522..1678441b13bf2 100644 --- a/docs/reference/mapping/fields/synthetic-source.asciidoc +++ b/docs/reference/mapping/fields/synthetic-source.asciidoc @@ -290,7 +290,7 @@ with the number and sizes of arrays present in source of each document, naturall [[synthetic-source-fields-native-list]] ===== Field types that support synthetic 
source with no storage overhead The following field types support synthetic source using data from <> or ->, and require no additional storage space to construct the `_source` field. +<>, and require no additional storage space to construct the `_source` field. NOTE: If you enable the <> or <> settings, then additional storage is required to store ignored field values for these types. diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index a965590d65ea7..f6448bc455d14 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -182,13 +182,6 @@ public void checkChangeNetworkHandling(Class callerClass) { checkChangeJVMGlobalState(callerClass); } - /** - * Check for operations that can access sensitive network information, e.g. secrets, tokens or SSL sessions - */ - public void checkReadSensitiveNetworkInformation(Class callerClass) { - neverEntitled(callerClass, "access sensitive network information"); - } - /** * Check for operations that can access sensitive network information, e.g. secrets, tokens or SSL sessions */ diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java new file mode 100644 index 0000000000000..5c1789f3aa66b --- /dev/null +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.nativeaccess; + +public class NativeAccessUtil { + /** + * Enables native access for the provided module. No-op for JDK 21 or before. + */ + public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) {} + + public static boolean isNativeAccessEnabled(Module module) { + return true; + } +} diff --git a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java new file mode 100644 index 0000000000000..34776407f759e --- /dev/null +++ b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/NativeAccessUtil.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.nativeaccess; + +public class NativeAccessUtil { + /** + * Enables native access for the provided module. 
Available to JDK 22+, required for JDK 24+ when using --illegal-native-access=deny + */ + public static void enableNativeAccess(ModuleLayer.Controller controller, Module module) { + controller.enableNativeAccess(module); + } + + public static boolean isNativeAccessEnabled(Module module) { + return module.isNativeAccessEnabled(); + } +} diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index cce0ef06cbf62..96802ac76295e 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -184,8 +184,9 @@ public void testSimpleChildQuery() throws Exception { assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); - assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(extractValue("join_field.name", source), equalTo("child")); + assertThat(extractValue("join_field.parent", source), equalTo("p1")); }); @@ -197,11 +198,13 @@ public void testSimpleChildQuery() throws Exception { response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); - assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), 
equalTo("p1")); + Map source0 = response.getHits().getAt(0).getSourceAsMap(); + assertThat(extractValue("join_field.name", source0), equalTo("child")); + assertThat(extractValue("join_field.parent", source0), equalTo("p1")); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2"))); - assertThat(extractValue("join_field.name", response.getHits().getAt(1).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", response.getHits().getAt(1).getSourceAsMap()), equalTo("p1")); + Map source1 = response.getHits().getAt(1).getSourceAsMap(); + assertThat(extractValue("join_field.name", source1), equalTo("child")); + assertThat(extractValue("join_field.parent", source1), equalTo("p1")); } ); diff --git a/muted-tests.yml b/muted-tests.yml index c0f4a4430a9bb..596001b5aac1a 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -235,8 +235,17 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/120668 - class: org.elasticsearch.xpack.security.authc.ldap.ADLdapUserSearchSessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/119882 -- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT - issue: https://github.com/elastic/elasticsearch/issues/120691 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncEnrichStopIT + method: testEnrichAfterStop + issue: https://github.com/elastic/elasticsearch/issues/120757 +- class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryIT + method: testStopQuery + issue: https://github.com/elastic/elasticsearch/issues/120767 +- class: org.elasticsearch.search.fieldcaps.FieldCapabilitiesIT + issue: https://github.com/elastic/elasticsearch/issues/120772 +- class: org.elasticsearch.action.search.SearchProgressActionListenerIT + method: testSearchProgressWithHits + issue: https://github.com/elastic/elasticsearch/issues/120671 # Examples: # diff --git 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java new file mode 100644 index 0000000000000..f6a8b86f27bec --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SourceModeRollingUpgradeIT.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.index.mapper.SourceFieldMapper; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class SourceModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + public SourceModeRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testConfigureStoredSourceBeforeIndexCreationLegacy() throws IOException { + assumeTrue("testing deprecation warnings and deprecation migrations", getOldClusterTestVersion().before("9.0.0")); + String templateName = "logs@custom"; + if (isOldCluster()) { + var storedSourceMapping = """ + { + "template": { + "settings": { + "index": { + "mode": "logsdb" + } + }, + "mappings": { + "_source": { + "mode": "stored" + } + } + } + }"""; + var putComponentTemplateRequest = new Request("PUT", "/_component_template/" + templateName); + 
putComponentTemplateRequest.setOptions(expectWarnings(SourceFieldMapper.DEPRECATION_WARNING)); + putComponentTemplateRequest.setJsonEntity(storedSourceMapping); + assertOK(client().performRequest(putComponentTemplateRequest)); + + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } else if (isUpgradedCluster()) { + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } + } + + public void testConfigureStoredSourceWhenIndexIsCreatedLegacy() throws IOException { + assumeTrue("testing deprecation warnings and deprecation migrations", getOldClusterTestVersion().before("9.0.0")); + String templateName = "logs@custom"; + if (isOldCluster()) { + var storedSourceMapping = """ + { + "template": { + "mappings": { + "_source": { + "mode": "stored" + } + } + } + }"""; + var putComponentTemplateRequest = new Request("PUT", "/_component_template/" + templateName); + putComponentTemplateRequest.setOptions(expectWarnings(SourceFieldMapper.DEPRECATION_WARNING)); + putComponentTemplateRequest.setJsonEntity(storedSourceMapping); + assertOK(client().performRequest(putComponentTemplateRequest)); + + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) 
entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } else if (isUpgradedCluster()) { + var request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client().performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + templateName + "]") + ); + } + } +} diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index e4b46b98cedda..68da320923898 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -87,4 +87,15 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.skipTest("search.highlight/30_max_analyzed_offset/Plain highlighter with max_analyzed_offset < 0 should FAIL", "semantics of test has changed") task.skipTest("indices.create/10_basic/Create lookup index", "default auto_expand_replicas was removed") task.skipTest("indices.create/10_basic/Create lookup index with one shard", "default auto_expand_replicas was removed") + task.skipTest("range/20_synthetic_source/Double range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Float range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Integer range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/IP range", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Long range", "_source.mode mapping 
attribute is no-op since 9.0.0") + task.skipTest("range/20_synthetic_source/Date range Rounding Fixes", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/92_metrics_auto_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/92_metrics_auto_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/91_metrics_no_subobjects/Metrics object indexing with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("index/91_metrics_no_subobjects/Root without subobjects with synthetic source", "_source.mode mapping attribute is no-op since 9.0.0") + task.skipTest("logsdb/10_settings/routing path allowed in logs mode with routing on sort fields", "Unknown feature routing.logsb_route_on_sort_fields") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json new file mode 100644 index 0000000000000..6fbdefef8b689 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query_stop.json @@ -0,0 +1,31 @@ +{ + "esql.async_query_stop": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-stop-api.html", + "description": "Stops a previously submitted async query request given its ID and collects the results." 
+ }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_query/async/{id}/stop", + "methods": [ + "POST" + ], + "parts": { + "id": { + "type": "string", + "description": "The async query ID" + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml index 5881ec83ebe85..eb1771ab7f3e3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/91_metrics_no_subobjects.yml @@ -142,9 +142,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic dynamic_templates: - no_subobjects: match: metrics @@ -212,9 +212,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic subobjects: false properties: host.name: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml index 7b8f785a2cb93..9d1a9793b1f55 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/92_metrics_auto_subobjects.yml @@ -139,9 +139,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic dynamic_templates: - no_subobjects: match: metrics @@ -208,9 +208,9 @@ body: index_patterns: test-* template: + settings: + index.mapping.source.mode: synthetic mappings: - _source: - mode: synthetic subobjects: auto properties: 
host.name: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml index de20f82f8ba2f..22cda05b074c4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/20_synthetic_source.yml @@ -7,9 +7,9 @@ setup: indices.create: index: synthetic_source_test body: + settings: + index.mapping.source.mode: synthetic mappings: - "_source": - "mode": "synthetic" "properties": "integer_range": "type" : "integer_range" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml index 93f1fafa7ab85..4f36514f833dc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/10_synonyms_put.yml @@ -17,10 +17,8 @@ setup: - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 - do: synonyms.get_synonym: @@ -67,10 +65,8 @@ setup: - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 - do: synonyms.get_synonym: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml index 7f545b466e65f..3d3d6d29ab249 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/110_synonyms_invalid.yml @@ -14,10 +14,8 @@ setup: # This is to ensure that all index shards (write and read) are available. 
In serverless this can take some time. - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml index 9e6af0f471e6e..8ab97b3ec779d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/20_synonyms_get.yml @@ -17,10 +17,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 --- "Get synonyms set": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml index 62e8fe333ce99..ea27267c518a5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml @@ -15,10 +15,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 --- "Delete synonyms set": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index 3815ea2c96c97..e68c93077bdec 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -13,10 +13,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 - do: synonyms.put_synonym: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml index 02757f711f690..c8f463ba5cbe7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/50_synonym_rule_put.yml @@ -17,10 +17,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 --- "Update a synonyms rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml index 9f1aa1d254169..1754467e89b2f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml @@ -17,10 +17,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 --- "Get a synonym rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml index d2c706decf4fd..b24ed799bfd8f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/70_synonym_rule_delete.yml @@ -17,10 +17,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 --- "Delete synonym rule": diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml index 965cae551fab2..a5efd61933c7a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml @@ -16,10 +16,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. - do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 # Create an index with synonym_filter that uses that synonyms set - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml index 4e6bd83f07955..9543783f0d6a3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/90_synonyms_reloading_for_synset.yml @@ -28,10 +28,8 @@ setup: # This is to ensure that all index shards (write and read) are available. In serverless this can take some time. 
- do: cluster.health: - index: .synonyms-2 - timeout: 2s + index: .synonyms wait_for_status: green - ignore: 408 # Create my_index1 with synonym_filter that uses synonyms_set1 - do: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 5e7cffcc8ef0d..325e9000db33d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -857,24 +858,22 @@ public void testExtractInnerHitBuildersWithDuplicatePath() throws Exception { public void testSyntheticSource() throws Exception { assertAcked( - prepareCreate("synthetic").setMapping( - jsonBuilder().startObject() - .startObject("_source") - .field("mode", "synthetic") - .endObject() - .startObject("properties") - .startObject("nested") - .field("type", "nested") - .startObject("properties") - .startObject("number") - .field("type", "long") - .field("ignore_malformed", true) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ) + prepareCreate("synthetic").setSettings(Settings.builder().put("index.mapping.source.mode", "synthetic").build()) + .setMapping( + jsonBuilder().startObject() + .startObject("properties") + .startObject("nested") + .field("type", "nested") + .startObject("properties") + .startObject("number") + .field("type", "long") + .field("ignore_malformed", true) + .endObject() + .endObject() 
+ .endObject() + .endObject() + .endObject() + ) ); ensureGreen("synthetic"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index affa371d92aa9..c246b7cc2f5cc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -655,8 +655,9 @@ public void testFetchFeatures() throws IOException { assertThat(hit.field("field2").getValue(), equalTo(2.71f)); assertThat(hit.field("script").getValue().toString(), equalTo("5")); - assertThat(hit.getSourceAsMap().size(), equalTo(1)); - assertThat(hit.getSourceAsMap().get("text").toString(), equalTo("some text to entertain")); + Map source = hit.getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("text").toString(), equalTo("some text to entertain")); assertEquals("some text to entertain", hit.getFields().get("text").getValue()); assertEquals("some text to entertain", hit.getFields().get("text_stored_lookup").getValue()); } @@ -927,8 +928,9 @@ public void testNestedFetchFeatures() { field = searchHit.field("script"); assertThat(field.getValue().toString(), equalTo("5")); - assertThat(searchHit.getSourceAsMap().size(), equalTo(1)); - assertThat(extractValue("message", searchHit.getSourceAsMap()), equalTo("some comment")); + Map source = searchHit.getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(extractValue("message", source), equalTo("some comment")); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index e39f8df9bad36..edc0c65bc7731 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -490,8 +490,9 @@ public void testNestedMultipleLayers() throws Exception { response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertNotNull(innerHits.getAt(0).getSourceAsMap()); - assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + Map source = innerHits.getAt(0).getSourceAsMap(); + assertNotNull(source); + assertFalse(source.isEmpty()); } ); assertNoFailuresAndResponse( @@ -507,8 +508,9 @@ public void testNestedMultipleLayers() throws Exception { response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertNotNull(innerHits.getAt(0).getSourceAsMap()); - assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); + Map source = innerHits.getAt(0).getSourceAsMap(); + assertNotNull(source); + assertFalse(source.isEmpty()); } ); } @@ -845,16 +847,12 @@ public void testNestedSource() throws Exception { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), - equalTo("fox eat quick") - ); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(1)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"), - equalTo("fox ate rabbit x y z") - ); + Map source0 = 
response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap(); + assertThat(source0.size(), equalTo(1)); + assertThat(source0.get("message"), equalTo("fox eat quick")); + Map source1 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap(); + assertThat(source1.size(), equalTo(1)); + assertThat(source1.get("message"), equalTo("fox ate rabbit x y z")); } ); @@ -866,16 +864,12 @@ public void testNestedSource() throws Exception { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), - equalTo("fox eat quick") - ); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat( - response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"), - equalTo("fox ate rabbit x y z") - ); + Map source0 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap(); + assertThat(source0.size(), equalTo(2)); + assertThat(source0.get("message"), equalTo("fox eat quick")); + Map source1 = response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap(); + assertThat(source1.size(), equalTo(2)); + assertThat(source1.get("message"), equalTo("fox ate rabbit x y z")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 353858e9d6974..29f56eeb5ecb3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -47,6 +47,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -152,8 +153,9 @@ public void testWithNullStrings() throws InterruptedException { searchResponse -> { assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + Map source = searchResponse.getHits().getHits()[0].getSourceAsMap(); + assertThat(source.get("field1"), Matchers.equalTo(100)); + assertThat(source.get("field2"), Matchers.equalTo("toto")); } ); } @@ -438,8 +440,9 @@ public void testScrollAndSearchAfterWithBigIndex() { int foundHits = 0; do { for (SearchHit hit : resp.getHits().getHits()) { - assertNotNull(hit.getSourceAsMap()); - final Object timestamp = hit.getSourceAsMap().get("timestamp"); + Map source = hit.getSourceAsMap(); + assertNotNull(source); + final Object timestamp = source.get("timestamp"); assertNotNull(timestamp); assertThat(((Number) timestamp).longValue(), equalTo(timestamps.get(foundHits))); foundHits++; @@ -469,8 +472,9 @@ public void testScrollAndSearchAfterWithBigIndex() { do { Object[] after = null; for (SearchHit hit : resp.getHits().getHits()) { - assertNotNull(hit.getSourceAsMap()); - final Object timestamp = hit.getSourceAsMap().get("timestamp"); + Map source = hit.getSourceAsMap(); + assertNotNull(source); + final Object timestamp = source.get("timestamp"); assertNotNull(timestamp); assertThat(((Number) timestamp).longValue(), 
equalTo(timestamps.get(foundHits))); after = hit.getSortValues(); @@ -505,8 +509,9 @@ public void testScrollAndSearchAfterWithBigIndex() { do { Object[] after = null; for (SearchHit hit : resp.getHits().getHits()) { - assertNotNull(hit.getSourceAsMap()); - final Object timestamp = hit.getSourceAsMap().get("timestamp"); + Map source = hit.getSourceAsMap(); + assertNotNull(source); + final Object timestamp = source.get("timestamp"); assertNotNull(timestamp); foundSeqNos.add(((Number) timestamp).longValue()); after = hit.getSortValues(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index f407c14c48c52..7fd31b056779c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -130,10 +130,11 @@ public void testIssue8226() { .setSize(10), response -> { logClusterState(); + Number previous = (Number) response.getHits().getHits()[0].getSourceAsMap().get("entry"); for (int j = 1; j < response.getHits().getHits().length; j++) { Number current = (Number) response.getHits().getHits()[j].getSourceAsMap().get("entry"); - Number previous = (Number) response.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); assertThat(response.toString(), current.intValue(), lessThan(previous.intValue())); + previous = current; } } ); @@ -144,10 +145,11 @@ public void testIssue8226() { .setSize(10), response -> { logClusterState(); + Number previous = (Number) response.getHits().getHits()[0].getSourceAsMap().get("entry"); for (int j = 1; j < response.getHits().getHits().length; j++) { Number current = (Number) response.getHits().getHits()[j].getSourceAsMap().get("entry"); - Number previous = (Number) response.getHits().getHits()[j - 1].getSourceAsMap().get("entry"); assertThat(response.toString(), current.intValue(), 
greaterThan(previous.intValue())); + previous = current; } } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index 0e7f8b604a8df..0b1d665f4f3e5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -11,6 +11,8 @@ import org.elasticsearch.test.ESIntegTestCase; +import java.util.Map; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.notNullValue; @@ -57,8 +59,9 @@ public void testSourceFiltering() { assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field1"), equalTo("value")); }, prepareSearch("test").setFetchSource("field1", null), prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }) @@ -84,8 +87,9 @@ public void testSourceWithWildcardFiltering() { assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat((String) source.get("field"), equalTo("value")); }, 
prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 8b21bb54361b6..cb0e91ce57c74 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -356,8 +356,9 @@ public void testSuggestDocumentSourceFiltering() throws Exception { assertThat(option.getText().toString(), equalTo("suggestion" + id)); assertThat(option.getHit(), hasId("" + id)); assertThat(option.getHit(), hasScore((id))); - assertNotNull(option.getHit().getSourceAsMap()); - Set sourceFields = option.getHit().getSourceAsMap().keySet(); + Map source = option.getHit().getSourceAsMap(); + assertNotNull(source); + Set sourceFields = source.keySet(); assertThat(sourceFields, contains("a")); assertThat(sourceFields, not(contains("b"))); id--; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index a50f888927d45..8cbdb908f38e5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,6 +163,8 @@ static TransportVersion def(int id) { public static final TransportVersion RESOLVE_CLUSTER_NO_INDEX_EXPRESSION = def(8_829_00_0); public static final TransportVersion ML_ROLLOVER_LEGACY_INDICES = def(8_830_00_0); public static final TransportVersion ADD_INCLUDE_FAILURE_INDICES_OPTION = def(8_831_00_0); + public static final TransportVersion ESQL_RESPONSE_PARTIAL = def(8_832_00_0); + public static final TransportVersion 
RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN = def(8_833_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 70a7f4c8cad0c..4b13d1642b600 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -507,6 +507,7 @@ void executeRequest( }); final SearchSourceBuilder source = original.source(); + final boolean isExplain = source != null && source.explain() != null && source.explain(); if (shouldOpenPIT(source)) { // disabling shard reordering for request original.setPreFilterShardSize(Integer.MAX_VALUE); @@ -536,7 +537,12 @@ public void onFailure(Exception e) { } else { Rewriteable.rewriteAndFetch( original, - searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, original.pointInTimeBuilder()), + searchService.getRewriteContext( + timeProvider::absoluteStartMillis, + resolvedIndices, + original.pointInTimeBuilder(), + isExplain + ), rewriteListener ); } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 496133aea2853..311df05f9b07b 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -32,6 +32,8 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; +import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyParserUtils; import org.elasticsearch.env.Environment; import 
org.elasticsearch.index.IndexVersion; @@ -57,8 +59,12 @@ import java.security.Permission; import java.security.Security; import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; @@ -219,24 +225,27 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { // load the plugin Java modules and layers now for use in entitlements var modulesBundles = PluginsLoader.loadModulesBundles(nodeEnv.modulesFile()); var pluginsBundles = PluginsLoader.loadPluginsBundles(nodeEnv.pluginsFile()); - var pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles); - bootstrap.setPluginsLoader(pluginsLoader); + + final PluginsLoader pluginsLoader; if (bootstrap.useEntitlements()) { LogManager.getLogger(Elasticsearch.class).info("Bootstrapping Entitlements"); var pluginData = Stream.concat( - pluginsLoader.moduleBundles() - .stream() + modulesBundles.stream() .map(bundle -> new PolicyParserUtils.PluginData(bundle.getDir(), bundle.pluginDescriptor().isModular(), false)), - pluginsLoader.pluginBundles() - .stream() + pluginsBundles.stream() .map(bundle -> new PolicyParserUtils.PluginData(bundle.getDir(), bundle.pluginDescriptor().isModular(), true)) ).toList(); var pluginPolicies = PolicyParserUtils.createPluginPolicies(pluginData); + + pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, findPluginsWithNativeAccess(pluginPolicies)); + var pluginsResolver = PluginsResolver.create(pluginsLoader); EntitlementBootstrap.bootstrap(pluginPolicies, pluginsResolver::resolveClassToPluginName); } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) { + // no need to explicitly enable native access for legacy code + pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, 
Map.of()); // install SM after natives, shutdown hooks, etc. LogManager.getLogger(Elasticsearch.class).info("Bootstrapping java SecurityManager"); org.elasticsearch.bootstrap.Security.configure( @@ -245,8 +254,12 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { args.pidFile() ); } else { + // TODO: should we throw/interrupt startup in this case? + pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of()); LogManager.getLogger(Elasticsearch.class).warn("Bootstrapping without any protection"); } + + bootstrap.setPluginsLoader(pluginsLoader); } private static void ensureInitialized(Class... classes) { @@ -467,6 +480,19 @@ private static Environment createEnvironment(Path configDir, Settings initialSet return new Environment(builder.build(), configDir); } + static Map> findPluginsWithNativeAccess(Map policies) { + Map> pluginsWithNativeAccess = new HashMap<>(); + for (var kv : policies.entrySet()) { + for (var scope : kv.getValue().scopes()) { + if (scope.entitlements().stream().anyMatch(entitlement -> entitlement instanceof LoadNativeLibrariesEntitlement)) { + var modulesToEnable = pluginsWithNativeAccess.computeIfAbsent(kv.getKey(), k -> new HashSet<>()); + modulesToEnable.add(scope.moduleName()); + } + } + } + return pluginsWithNativeAccess; + } + // -- instance private static volatile Elasticsearch INSTANCE; diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index b3d24343f60ed..8eb0adc5d6e86 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -812,7 +812,8 @@ public QueryRewriteContext newQueryRewriteContext( scriptService, null, null, - null + null, + false ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 
69ebcd4ba3fe6..e801a07f11670 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -142,6 +142,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY = def(9_004_00_0, Version.LUCENE_10_0_0); public static final IndexVersion INFERENCE_METADATA_FIELDS = def(9_005_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSB_OPTIONAL_SORTING_ON_HOST_NAME = def(9_006_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP = def(9_007_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 6a06d8ba4df28..a6cb5561f2e7b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -73,6 +73,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -81,6 +82,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -89,6 +91,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -97,6 +100,7 @@ public enum Mode { Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, + false, false ); @@ -149,13 +153,21 @@ public static class Builder extends MetadataFieldMapper.Builder { private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { + private final boolean 
sourceModeIsNoop; + + public Builder( + IndexMode indexMode, + final Settings settings, + boolean sourceModeIsNoop, + boolean supportsCheckForNonDefaultParams, + boolean serializeMode + ) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.sourceModeIsNoop = sourceModeIsNoop; this.serializeMode = serializeMode; this.mode = new Parameter<>( "mode", @@ -220,7 +232,7 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - if (mode.isConfigured()) { + if (mode.isConfigured() && sourceModeIsNoop == false) { serializeMode = true; } final SourceFieldMapper sourceFieldMapper; @@ -235,7 +247,8 @@ public SourceFieldMapper build() { enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), excludes.getValue().toArray(Strings.EMPTY_ARRAY), - serializeMode + serializeMode, + sourceModeIsNoop ); } if (indexMode != null) { @@ -252,7 +265,7 @@ private Mode resolveSourceMode() { } // If `_source.mode` is not set we need to apply a default according to index mode. - if (mode.get() == null) { + if (mode.get() == null || sourceModeIsNoop) { if (indexMode == null || indexMode == IndexMode.STANDARD) { // Special case to avoid serializing mode. 
return null; @@ -288,12 +301,20 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (onOrAfterDeprecateModeVersion(c.indexVersionCreated())) { return resolveStaticInstance(settingSourceMode); } else { - return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); + return new SourceFieldMapper( + settingSourceMode, + Explicit.IMPLICIT_TRUE, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + true, + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP) + ); } }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP), c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), onOrAfterDeprecateModeVersion(c.indexVersionCreated()) == false ) @@ -339,6 +360,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { // nullable for bwc reasons - TODO: fold this into serializeMode private final @Nullable Mode mode; private final boolean serializeMode; + private final boolean sourceModeIsNoop; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -348,7 +370,14 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { + private SourceFieldMapper( + Mode mode, + Explicit enabled, + String[] includes, + String[] excludes, + boolean serializeMode, + boolean sourceModeIsNoop + ) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -357,6 +386,7 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include 
this.excludes = excludes; this.complete = stored() && sourceFilter == null; this.serializeMode = serializeMode; + this.sourceModeIsNoop = sourceModeIsNoop; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -488,7 +518,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); + return new Builder(null, Settings.EMPTY, sourceModeIsNoop, false, serializeMode).init(this); } public boolean isSynthetic() { diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index a84455ef09bf2..72727b589617b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -105,7 +105,8 @@ public CoordinatorRewriteContext( null, null, null, - null + null, + false ); this.dateFieldRangeInfo = dateFieldRangeInfo; this.tier = tier; diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 265a0c52593bd..bc14a31978c18 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -72,6 +72,7 @@ public class QueryRewriteContext { private final ResolvedIndices resolvedIndices; private final PointInTimeBuilder pit; private QueryRewriteInterceptor queryRewriteInterceptor; + private final boolean isExplain; public QueryRewriteContext( final XContentParserConfiguration parserConfiguration, @@ -89,7 +90,8 @@ public QueryRewriteContext( final ScriptCompiler scriptService, final ResolvedIndices resolvedIndices, final PointInTimeBuilder pit, - final QueryRewriteInterceptor 
queryRewriteInterceptor + final QueryRewriteInterceptor queryRewriteInterceptor, + final boolean isExplain ) { this.parserConfiguration = parserConfiguration; @@ -109,6 +111,7 @@ public QueryRewriteContext( this.resolvedIndices = resolvedIndices; this.pit = pit; this.queryRewriteInterceptor = queryRewriteInterceptor; + this.isExplain = isExplain; } public QueryRewriteContext(final XContentParserConfiguration parserConfiguration, final Client client, final LongSupplier nowInMillis) { @@ -128,7 +131,8 @@ public QueryRewriteContext(final XContentParserConfiguration parserConfiguration null, null, null, - null + null, + false ); } @@ -139,6 +143,18 @@ public QueryRewriteContext( final ResolvedIndices resolvedIndices, final PointInTimeBuilder pit, final QueryRewriteInterceptor queryRewriteInterceptor + ) { + this(parserConfiguration, client, nowInMillis, resolvedIndices, pit, queryRewriteInterceptor, false); + } + + public QueryRewriteContext( + final XContentParserConfiguration parserConfiguration, + final Client client, + final LongSupplier nowInMillis, + final ResolvedIndices resolvedIndices, + final PointInTimeBuilder pit, + final QueryRewriteInterceptor queryRewriteInterceptor, + final boolean isExplain ) { this( parserConfiguration, @@ -156,7 +172,8 @@ public QueryRewriteContext( null, resolvedIndices, pit, - queryRewriteInterceptor + queryRewriteInterceptor, + isExplain ); } @@ -262,6 +279,10 @@ public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) { this.mapUnmappedFieldAsString = mapUnmappedFieldAsString; } + public boolean isExplain() { + return this.isExplain; + } + public NamedWriteableRegistry getWriteableRegistry() { return writeableRegistry; } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index b2ee6134a7728..bf1ac90101d86 100644 --- 
a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -272,7 +272,8 @@ private SearchExecutionContext( scriptService, null, null, - null + null, + false ); this.shardId = shardId; this.shardRequestIndex = shardRequestIndex; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 0a3baf2c52f57..1df5bddeff9e3 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -1770,7 +1770,19 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set loadPluginsBundles(Path pluginsDirectory) { /** * Constructs a new PluginsLoader * - * @param modules The set of module bundles present on the filesystem - * @param plugins The set of plugin bundles present on the filesystem + * @param modules The set of module bundles present on the filesystem + * @param plugins The set of plugin bundles present on the filesystem + * @param pluginsWithNativeAccess A map plugin name -> set of module names for which we want to enable native access */ - public static PluginsLoader createPluginsLoader(Set modules, Set plugins) { - return createPluginsLoader(modules, plugins, true); + public static PluginsLoader createPluginsLoader( + Set modules, + Set plugins, + Map> pluginsWithNativeAccess + ) { + return createPluginsLoader(modules, plugins, pluginsWithNativeAccess, true); } /** * Constructs a new PluginsLoader * - * @param modules The set of module bundles present on the filesystem - * @param plugins The set of plugin bundles present on the filesystem + * @param modules The set of module bundles present on the filesystem + * @param plugins The set of plugin bundles present on the filesystem + * @param pluginsWithNativeAccess A map plugin name -> set of module names for 
which we want to enable native access * @param withServerExports {@code true} to add server module exports */ - public static PluginsLoader createPluginsLoader(Set modules, Set plugins, boolean withServerExports) { + public static PluginsLoader createPluginsLoader( + Set modules, + Set plugins, + Map> pluginsWithNativeAccess, + boolean withServerExports + ) { Map> qualifiedExports; if (withServerExports) { qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); @@ -207,7 +220,8 @@ public static PluginsLoader createPluginsLoader(Set modules, Set

systemLoaderURLs = JarHell.parseModulesAndClassPath(); for (PluginBundle bundle : sortedBundles) { PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); - loadPluginLayer(bundle, loadedPluginLayers, qualifiedExports); + var modulesWithNativeAccess = pluginsWithNativeAccess.getOrDefault(bundle.plugin.getName(), Set.of()); + loadPluginLayer(bundle, loadedPluginLayers, qualifiedExports, modulesWithNativeAccess); } } @@ -245,7 +259,8 @@ public Set pluginBundles() { private static void loadPluginLayer( PluginBundle bundle, Map loaded, - Map> qualifiedExports + Map> qualifiedExports, + Set modulesWithNativeAccess ) { String name = bundle.plugin.getName(); logger.debug(() -> "Loading bundle: " + name); @@ -276,7 +291,8 @@ private static void loadPluginLayer( pluginParentLoader, extendedPlugins, spiLayerAndLoader, - qualifiedExports + qualifiedExports, + modulesWithNativeAccess ); final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); @@ -323,7 +339,8 @@ private static LayerAndLoader createPluginLayerAndLoader( ClassLoader pluginParentLoader, List extendedPlugins, LayerAndLoader spiLayerAndLoader, - Map> qualifiedExports + Map> qualifiedExports, + Set modulesWithNativeAccess ) { final PluginDescriptor plugin = bundle.plugin; if (plugin.getModuleName().isPresent()) { @@ -332,7 +349,7 @@ private static LayerAndLoader createPluginLayerAndLoader( Stream.ofNullable(spiLayerAndLoader != null ? 
spiLayerAndLoader.layer() : null), extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer) ).toList(); - return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); + return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports, modulesWithNativeAccess); } else if (plugin.isStable()) { logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); return LayerAndLoader.ofUberModuleLoader( @@ -341,7 +358,8 @@ private static LayerAndLoader createPluginLayerAndLoader( ModuleLayer.boot(), "synthetic." + toModuleName(plugin.getName()), bundle.allUrls, - Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules + Set.of("org.elasticsearch.server"), // TODO: instead of denying server, allow only jvm + stable API modules + modulesWithNativeAccess ) ); } else { @@ -363,7 +381,8 @@ static LayerAndLoader createSpiModuleLayer( urlsToPaths(urls), parentLoader, parentLayers, - qualifiedExports + qualifiedExports, + Set.of() ); } @@ -371,7 +390,8 @@ static LayerAndLoader createPluginModuleLayer( PluginBundle bundle, ClassLoader parentLoader, List parentLayers, - Map> qualifiedExports + Map> qualifiedExports, + Set modulesWithNativeAccess ) { assert bundle.plugin.getModuleName().isPresent(); return createModuleLayer( @@ -380,7 +400,8 @@ static LayerAndLoader createPluginModuleLayer( urlsToPaths(bundle.urls), parentLoader, parentLayers, - qualifiedExports + qualifiedExports, + modulesWithNativeAccess ); } @@ -390,7 +411,8 @@ static LayerAndLoader createModuleLayer( Path[] paths, ClassLoader parentLoader, List parentLayers, - Map> qualifiedExports + Map> qualifiedExports, + Set modulesWithNativeAccess ) { logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); var finder = ModuleFinder.of(paths); @@ -408,6 +430,7 @@ static LayerAndLoader createModuleLayer( 
exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); // configure qualified exports/opens to other modules/plugins addPluginExportsServices(qualifiedExports, controller); + enableNativeAccess(moduleName, modulesWithNativeAccess, controller); logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); } @@ -518,4 +541,18 @@ protected void addOpens(String pkg, Module target) { addExportsService(qualifiedExports, exportsService, module.getName()); } } + + private static void enableNativeAccess(String mainModuleName, Set modulesWithNativeAccess, Controller controller) { + for (var moduleName : modulesWithNativeAccess) { + var module = controller.layer().findModule(moduleName); + module.ifPresentOrElse(m -> NativeAccessUtil.enableNativeAccess(controller, m), () -> { + assert false + : Strings.format( + "Native access not enabled for module [%s]: not a valid module name in layer [%s]", + moduleName, + mainModuleName + ); + }); + } + } } diff --git a/server/src/main/java/org/elasticsearch/plugins/UberModuleClassLoader.java b/server/src/main/java/org/elasticsearch/plugins/UberModuleClassLoader.java index 53ba7b0f2b767..5e63f2e0b9aa9 100644 --- a/server/src/main/java/org/elasticsearch/plugins/UberModuleClassLoader.java +++ b/server/src/main/java/org/elasticsearch/plugins/UberModuleClassLoader.java @@ -10,6 +10,7 @@ package org.elasticsearch.plugins; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.nativeaccess.NativeAccessUtil; import java.io.IOException; import java.io.InputStream; @@ -82,7 +83,7 @@ private static Map> getModuleToServiceMap(ModuleLayer module } static UberModuleClassLoader getInstance(ClassLoader parent, String moduleName, Set jarUrls) { - return getInstance(parent, ModuleLayer.boot(), moduleName, jarUrls, Set.of()); + return getInstance(parent, ModuleLayer.boot(), moduleName, jarUrls, 
Set.of(), Set.of()); } @SuppressWarnings("removal") @@ -91,7 +92,8 @@ static UberModuleClassLoader getInstance( ModuleLayer parentLayer, String moduleName, Set jarUrls, - Set moduleDenyList + Set moduleDenyList, + Set modulesWithNativeAccess ) { Path[] jarPaths = jarUrls.stream().map(UberModuleClassLoader::urlToPathUnchecked).toArray(Path[]::new); var parentLayerModuleToServiceMap = getModuleToServiceMap(parentLayer); @@ -123,7 +125,8 @@ static UberModuleClassLoader getInstance( jarUrls.toArray(new URL[0]), cf, parentLayer, - packageNames + packageNames, + modulesWithNativeAccess ); return AccessController.doPrivileged(pa); } @@ -147,7 +150,8 @@ private UberModuleClassLoader( URL[] jarURLs, Configuration cf, ModuleLayer mparent, - Set packageNames + Set packageNames, + Set modulesWithNativeAccess ) { super(parent); @@ -159,7 +163,9 @@ private UberModuleClassLoader( // Class::getModule call return the name of our ubermodule. this.moduleController = ModuleLayer.defineModules(cf, List.of(mparent), s -> this); this.module = this.moduleController.layer().findModule(moduleName).orElseThrow(); - + for (var name : modulesWithNativeAccess) { + moduleController.layer().findModule(name).ifPresent(m -> NativeAccessUtil.enableNativeAccess(moduleController, m)); + } this.packageNames = packageNames; } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 701d451ac2c14..8a70f8a7f41a6 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -104,7 +104,8 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted private transient String index; private transient String clusterAlias; - private Map sourceAsMap; + // For asserting that the method #getSourceAsMap is called just once on the lifetime of this object + private boolean sourceAsMapCalled = false; private Map innerHits; @@ -142,7 
+143,6 @@ private SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, null, null, null, - null, new HashMap<>(), new HashMap<>(), refCounted @@ -166,7 +166,6 @@ public SearchHit( SearchShardTarget shard, String index, String clusterAlias, - Map sourceAsMap, Map innerHits, Map documentFields, Map metaFields, @@ -188,7 +187,6 @@ public SearchHit( this.shard = shard; this.index = index; this.clusterAlias = clusterAlias; - this.sourceAsMap = sourceAsMap; this.innerHits = innerHits; this.documentFields = documentFields; this.metaFields = metaFields; @@ -279,7 +277,6 @@ public static SearchHit readFrom(StreamInput in, boolean pooled) throws IOExcept shardTarget, index, clusterAlias, - null, innerHits, documentFields, metaFields, @@ -447,7 +444,6 @@ public BytesReference getSourceRef() { */ public SearchHit sourceRef(BytesReference source) { this.source = source; - this.sourceAsMap = null; return this; } @@ -476,19 +472,18 @@ public String getSourceAsString() { } /** - * The source of the document as a map (can be {@code null}). + * The source of the document as a map (can be {@code null}). This method is expected + * to be called at most once during the lifetime of the object as the generated map + * is expensive to generate and it does not get cached. */ public Map getSourceAsMap() { assert hasReferences(); + assert sourceAsMapCalled == false : "getSourceAsMap() called twice"; + sourceAsMapCalled = true; if (source == null) { return null; } - if (sourceAsMap != null) { - return sourceAsMap; - } - - sourceAsMap = Source.fromBytes(source).source(); - return sourceAsMap; + return Source.fromBytes(source).source(); } /** @@ -758,7 +753,6 @@ public SearchHit asUnpooled() { shard, index, clusterAlias, - sourceAsMap, innerHits == null ?
null : innerHits.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().asUnpooled())), diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 9284bc594a265..7f3747d321972 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -1892,7 +1892,19 @@ private void rewriteAndFetchShardRequest(IndexShard shard, ShardSearchRequest re * Returns a new {@link QueryRewriteContext} with the given {@code now} provider */ public QueryRewriteContext getRewriteContext(LongSupplier nowInMillis, ResolvedIndices resolvedIndices, PointInTimeBuilder pit) { - return indicesService.getRewriteContext(nowInMillis, resolvedIndices, pit); + return getRewriteContext(nowInMillis, resolvedIndices, pit, false); + } + + /** + * Returns a new {@link QueryRewriteContext} with the given {@code now} provider + */ + public QueryRewriteContext getRewriteContext( + LongSupplier nowInMillis, + ResolvedIndices resolvedIndices, + PointInTimeBuilder pit, + final boolean isExplain + ) { + return indicesService.getRewriteContext(nowInMillis, resolvedIndices, pit, isExplain); } public CoordinatorRewriteContextProvider getCoordinatorRewriteContextProvider(LongSupplier nowInMillis) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 8ff381cbbc84d..7e7e10c48ea14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -235,6 +235,8 @@ public Object getProperty(List path) { } else if (tokens[0].equals(SCORE)) { return topHit.getScore(); } else if (tokens[0].equals(SOURCE)) { + // Caching the map might help here but 
memory usage is a concern for this class + // This is dead code, pipeline aggregations do not support _source.field. Map sourceAsMap = topHit.getSourceAsMap(); if (sourceAsMap != null) { Object property = sourceAsMap.get(tokens[1]); diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 298340e5c579e..8403031bc65f5 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -78,7 +78,7 @@ public T addChild(RetrieverBuilder retrieverBuilder) { /** * Combines the provided {@code rankResults} to return the final top documents. */ - protected abstract RankDoc[] combineInnerRetrieverResults(List rankResults); + protected abstract RankDoc[] combineInnerRetrieverResults(List rankResults, boolean explain); @Override public final boolean isCompound() { @@ -181,7 +181,7 @@ public void onResponse(MultiSearchResponse items) { failures.forEach(ex::addSuppressed); listener.onFailure(ex); } else { - results.set(combineInnerRetrieverResults(topDocs)); + results.set(combineInnerRetrieverResults(topDocs, ctx.isExplain())); listener.onResponse(null); } } diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java index 09688b5b9b001..4531beef7125d 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RescorerRetrieverBuilder.java @@ -148,7 +148,7 @@ protected RescorerRetrieverBuilder clone(List newChildRetriever } @Override - protected RankDoc[] combineInnerRetrieverResults(List rankResults) { + protected RankDoc[] combineInnerRetrieverResults(List rankResults, boolean explain) { assert 
rankResults.size() == 1; ScoreDoc[] scoreDocs = rankResults.getFirst(); RankDoc[] rankDocs = new RankDoc[scoreDocs.length]; diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index faafdf7d71e33..5eb51d3cadcc6 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; @@ -175,7 +176,7 @@ public TcpTransport( channel, requestId, TransportHandshaker.HANDSHAKE_ACTION_NAME, - new TransportHandshaker.HandshakeRequest(version), + new TransportHandshaker.HandshakeRequest(version, Build.current().version()), TransportRequestOptions.EMPTY, v, null, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index 4eb16f327a5e7..a5973e4001444 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -9,6 +9,7 @@ package org.elasticsearch.transport; +import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; @@ -19,10 +20,12 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.threadpool.ThreadPool; import java.io.EOFException; import java.io.IOException; +import java.util.Objects; import 
java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -206,7 +209,7 @@ void handleHandshake(TransportChannel channel, long requestId, StreamInput strea assert ignoreDeserializationErrors : exception; throw exception; } - channel.sendResponse(new HandshakeResponse(this.version)); + channel.sendResponse(new HandshakeResponse(this.version, Build.current().version())); } TransportResponseHandler removeHandlerForHandshake(long requestId) { @@ -245,7 +248,7 @@ public Executor executor() { @Override public void handleResponse(HandshakeResponse response) { if (isDone.compareAndSet(false, true)) { - TransportVersion responseVersion = response.responseVersion; + TransportVersion responseVersion = response.transportVersion; if (TransportVersion.isCompatible(responseVersion) == false) { listener.onFailure( new IllegalStateException( @@ -257,7 +260,7 @@ public void handleResponse(HandshakeResponse response) { ) ); } else { - listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getResponseVersion())); + listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getTransportVersion())); } } } @@ -278,12 +281,23 @@ void handleLocalException(TransportException e) { static final class HandshakeRequest extends TransportRequest { - private final TransportVersion version; + /** + * The {@link TransportVersion#current()} of the requesting node. + */ + final TransportVersion transportVersion; - HandshakeRequest(TransportVersion version) { - this.version = version; + /** + * The {@link Build#version()} of the requesting node, as a {@link String}, for better reporting of handshake failures due to + * an incompatible version. 
+ */ + final String releaseVersion; + + HandshakeRequest(TransportVersion transportVersion, String releaseVersion) { + this.transportVersion = Objects.requireNonNull(transportVersion); + this.releaseVersion = Objects.requireNonNull(releaseVersion); } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remainingMessage == null is invalid in v9 HandshakeRequest(StreamInput streamInput) throws IOException { super(streamInput); BytesReference remainingMessage; @@ -293,10 +307,16 @@ static final class HandshakeRequest extends TransportRequest { remainingMessage = null; } if (remainingMessage == null) { - version = null; + transportVersion = null; + releaseVersion = null; } else { try (StreamInput messageStreamInput = remainingMessage.streamInput()) { - this.version = TransportVersion.readVersion(messageStreamInput); + this.transportVersion = TransportVersion.readVersion(messageStreamInput); + if (streamInput.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + this.releaseVersion = messageStreamInput.readString(); + } else { + this.releaseVersion = this.transportVersion.toReleaseVersion(); + } } } } @@ -304,42 +324,84 @@ static final class HandshakeRequest extends TransportRequest { @Override public void writeTo(StreamOutput streamOutput) throws IOException { super.writeTo(streamOutput); - assert version != null; - try (BytesStreamOutput messageStreamOutput = new BytesStreamOutput(4)) { - TransportVersion.writeVersion(version, messageStreamOutput); + assert transportVersion != null; + try (BytesStreamOutput messageStreamOutput = new BytesStreamOutput(1024)) { + TransportVersion.writeVersion(transportVersion, messageStreamOutput); + if (streamOutput.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + messageStreamOutput.writeString(releaseVersion); + } // else we just send the transport version and rely on a best-effort mapping to release versions BytesReference reference = messageStreamOutput.bytes(); streamOutput.writeBytesReference(reference); } } } + 
/** + * A response to a low-level transport handshake, carrying information about the version of the responding node. + */ static final class HandshakeResponse extends TransportResponse { - private final TransportVersion responseVersion; + /** + * The {@link TransportVersion#current()} of the responding node. + */ + private final TransportVersion transportVersion; - HandshakeResponse(TransportVersion responseVersion) { - this.responseVersion = responseVersion; + /** + * The {@link Build#version()} of the responding node, as a {@link String}, for better reporting of handshake failures due to + * an incompatible version. + */ + private final String releaseVersion; + + HandshakeResponse(TransportVersion transportVersion, String releaseVersion) { + this.transportVersion = Objects.requireNonNull(transportVersion); + this.releaseVersion = Objects.requireNonNull(releaseVersion); } - private HandshakeResponse(StreamInput in) throws IOException { + HandshakeResponse(StreamInput in) throws IOException { super(in); - responseVersion = TransportVersion.readVersion(in); + transportVersion = TransportVersion.readVersion(in); + if (in.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + releaseVersion = in.readString(); + } else { + releaseVersion = transportVersion.toReleaseVersion(); + } } @Override public void writeTo(StreamOutput out) throws IOException { - assert responseVersion != null; - TransportVersion.writeVersion(responseVersion, out); + TransportVersion.writeVersion(transportVersion, out); + if (out.getTransportVersion().onOrAfter(V9_HANDSHAKE_VERSION)) { + out.writeString(releaseVersion); + } // else we just send the transport version and rely on a best-effort mapping to release versions + } + + /** + * @return the {@link TransportVersion#current()} of the responding node. 
+ */ + TransportVersion getTransportVersion() { + return transportVersion; } - TransportVersion getResponseVersion() { - return responseVersion; + /** + * @return the {@link Build#version()} of the responding node, as a {@link String}, for better reporting of handshake failures due + * to an incompatible version. + */ + String getReleaseVersion() { + return releaseVersion; } } @FunctionalInterface interface HandshakeRequestSender { - - void sendRequest(DiscoveryNode node, TcpChannel channel, long requestId, TransportVersion version) throws IOException; + /** + * @param node The (expected) remote node, for error reporting and passing to + * {@link TransportMessageListener#onRequestSent}. + * @param channel The TCP channel to use to send the handshake request. + * @param requestId The transport request ID, for matching up the response. + * @param handshakeTransportVersion The {@link TransportVersion} to use for the handshake request, which will be + * {@link TransportHandshaker#V8_HANDSHAKE_VERSION} in production. 
+ */ + void sendRequest(DiscoveryNode node, TcpChannel channel, long requestId, TransportVersion handshakeTransportVersion) + throws IOException; } } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 450bb1e08940e..8196c01ee8bbc 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -130,6 +130,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -1743,7 +1744,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { NodeClient client = new NodeClient(settings, threadPool); SearchService searchService = mock(SearchService.class); - when(searchService.getRewriteContext(any(), any(), any())).thenReturn( + when(searchService.getRewriteContext(any(), any(), any(), anyBoolean())).thenReturn( new QueryRewriteContext(null, null, null, null, null, null) ); ClusterService clusterService = new ClusterService( diff --git a/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java new file mode 100644 index 0000000000000..d84ee0267251a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/bootstrap/ElasticsearchTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.LoadNativeLibrariesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.Policy; +import org.elasticsearch.entitlement.runtime.policy.Scope; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static java.util.Map.entry; +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ElasticsearchTests extends ESTestCase { + public void testFindPluginsWithNativeAccess() { + + var policies = Map.ofEntries( + entry( + "plugin-with-native", + new Policy( + "policy", + List.of( + new Scope("module.a", List.of(new LoadNativeLibrariesEntitlement())), + new Scope("module.b", List.of(new InboundNetworkEntitlement())) + ) + ) + ), + entry( + "another-plugin-with-native", + new Policy( + "policy", + List.of( + new Scope("module.a2", List.of(new LoadNativeLibrariesEntitlement())), + new Scope("module.b2", List.of(new LoadNativeLibrariesEntitlement())), + new Scope("module.c2", List.of(new InboundNetworkEntitlement())) + + ) + ) + ), + entry( + "plugin-without-native", + new Policy( + "policy", + List.of( + new Scope("module.a3", List.of(new InboundNetworkEntitlement())), + new Scope("module.b3", List.of(new OutboundNetworkEntitlement())) + ) + ) + ) + ); + + var pluginsWithNativeAccess = Elasticsearch.findPluginsWithNativeAccess(policies); + + assertThat(pluginsWithNativeAccess.keySet(), 
containsInAnyOrder("plugin-with-native", "another-plugin-with-native")); + assertThat(pluginsWithNativeAccess.get("plugin-with-native"), containsInAnyOrder("module.a")); + assertThat(pluginsWithNativeAccess.get("another-plugin-with-native"), containsInAnyOrder("module.a2", "module.b2")); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index be36ab9d6eac1..5da7d6100bf4b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -101,9 +101,6 @@ public void testCreateDynamicMapperBuilderContext() throws IOException { var mapping = XContentBuilder.builder(XContentType.JSON.xContent()) .startObject() .startObject("_doc") - .startObject("_source") - .field("mode", "synthetic") - .endObject() .startObject(DataStreamTimestampFieldMapper.NAME) .field("enabled", "true") .endObject() @@ -120,6 +117,11 @@ public void testCreateDynamicMapperBuilderContext() throws IOException { .endObject() .endObject(); var documentMapper = new MapperServiceTestCase() { + + @Override + protected Settings getIndexSettings() { + return Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + } }.createDocumentMapper(mapping); var parserContext = new TestDocumentParserContext(documentMapper.mappers(), null); parserContext.path().add("foo"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index d4d0e67ff4141..58e173d1ee45f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void 
testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index bc560d94b8f52..8ad37908b2e9c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -64,18 +64,10 @@ protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerUpdateCheck( topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), - dm -> { - assertTrue(dm.metadataMapper(SourceFieldMapper.class).isSynthetic()); - } + dm -> {} ); checker.registerConflictCheck("includes", b -> b.array("includes", "foo*")); checker.registerConflictCheck("excludes", b -> b.array("excludes", "foo*")); - checker.registerConflictCheck( - "mode", - topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject()), - topMapping(b -> b.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject()), - d -> {} - ); } public void testNoFormat() throws 
Exception { @@ -219,23 +211,45 @@ public void testSyntheticUpdates() throws Exception { """); SourceFieldMapper mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); - assertTrue(mapper.isSynthetic()); + assertFalse("mode is a noop parameter", mapper.isSynthetic()); merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "synthetic" } } } """); mapper = mapperService.documentMapper().sourceMapper(); assertTrue(mapper.enabled()); - assertTrue(mapper.isSynthetic()); + assertFalse("mode is a noop parameter", mapper.isSynthetic()); ParsedDocument doc = mapperService.documentMapper().parse(source("{}")); assertNull(doc.rootDoc().get(SourceFieldMapper.NAME)); - Exception e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, """ - { "_doc" : { "_source" : { "mode" : "stored" } } } - """)); + merge(mapperService, """ + { "_doc" : { "_source" : { "mode" : "disabled" } } } + """); + + mapper = mapperService.documentMapper().sourceMapper(); + assertTrue("mode is a noop parameter", mapper.enabled()); + assertFalse("mode is a noop parameter", mapper.isSynthetic()); + } + + public void testSyntheticUpdatesLegacy() throws Exception { + var mappings = XContentBuilder.builder(XContentType.JSON.xContent()).startObject().startObject("_doc").startObject("_source"); + mappings.field("mode", "synthetic").endObject().endObject().endObject(); + var version = IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_MODE_ATTRIBUTE_NOOP); + MapperService mapperService = createMapperService(version, mappings); + SourceFieldMapper mapper = mapperService.documentMapper().sourceMapper(); + assertTrue(mapper.enabled()); + assertTrue(mapper.isSynthetic()); - assertThat(e.getMessage(), containsString("Cannot update parameter [mode] from [synthetic] to [stored]")); + merge(mapperService, """ + { "_doc" : { "_source" : { "mode" : "synthetic" } } } + """); + mapper = mapperService.documentMapper().sourceMapper(); + 
assertTrue(mapper.enabled()); + assertTrue(mapper.isSynthetic()); + + ParsedDocument doc = mapperService.documentMapper().parse(source("{}")); + assertNull(doc.rootDoc().get(SourceFieldMapper.NAME)); merge(mapperService, """ { "_doc" : { "_source" : { "mode" : "disabled" } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java index 5dd231ab97886..b997ac4747a07 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java @@ -53,7 +53,8 @@ public void testGetTierPreference() { null, null, null, - null + null, + false ); assertThat(context.getTierPreference(), is("data_cold")); @@ -81,7 +82,8 @@ public void testGetTierPreference() { null, null, null, - null + null, + false ); assertThat(context.getTierPreference(), is(nullValue())); diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index dc70c44a89128..0c31ab703862f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", 
IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index a49d895f38f67..532e30804947c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -92,7 +92,7 @@ public void testGetFromTranslogWithStringSourceMappingOptionsAndStoredFields() t """; boolean noSource = randomBoolean(); String sourceOptions = noSource ? "\"enabled\": false" : randomBoolean() ? "\"excludes\": [\"fo*\"]" : "\"includes\": [\"ba*\"]"; - runGetFromTranslogWithOptions(docToIndex, sourceOptions, noSource ? "" : "{\"bar\":\"bar\"}", "\"text\"", "foo", false); + runGetFromTranslogWithOptions(docToIndex, sourceOptions, null, noSource ? "" : "{\"bar\":\"bar\"}", "\"text\"", "foo", false); } public void testGetFromTranslogWithLongSourceMappingOptionsAndStoredFields() throws IOException { @@ -101,7 +101,7 @@ public void testGetFromTranslogWithLongSourceMappingOptionsAndStoredFields() thr """; boolean noSource = randomBoolean(); String sourceOptions = noSource ? "\"enabled\": false" : randomBoolean() ? "\"excludes\": [\"fo*\"]" : "\"includes\": [\"ba*\"]"; - runGetFromTranslogWithOptions(docToIndex, sourceOptions, noSource ? "" : "{\"bar\":42}", "\"long\"", 7L, false); + runGetFromTranslogWithOptions(docToIndex, sourceOptions, null, noSource ? 
"" : "{\"bar\":42}", "\"long\"", 7L, false); } public void testGetFromTranslogWithSyntheticSource() throws IOException { @@ -110,10 +110,8 @@ public void testGetFromTranslogWithSyntheticSource() throws IOException { """; String expectedFetchedSource = """ {"bar":42,"foo":7}"""; - String sourceOptions = """ - "mode": "synthetic" - """; - runGetFromTranslogWithOptions(docToIndex, sourceOptions, expectedFetchedSource, "\"long\"", 7L, true); + var settings = Settings.builder().put("index.mapping.source.mode", "synthetic").build(); + runGetFromTranslogWithOptions(docToIndex, "", settings, expectedFetchedSource, "\"long\"", 7L, true); } public void testGetFromTranslogWithDenseVector() throws IOException { @@ -127,12 +125,13 @@ public void testGetFromTranslogWithDenseVector() throws IOException { "foo": "foo" } """, Arrays.toString(vector)); - runGetFromTranslogWithOptions(docToIndex, "\"enabled\": true", docToIndex, "\"text\"", "foo", "\"dense_vector\"", false); + runGetFromTranslogWithOptions(docToIndex, "\"enabled\": true", null, docToIndex, "\"text\"", "foo", "\"dense_vector\"", false); } private void runGetFromTranslogWithOptions( String docToIndex, String sourceOptions, + Settings settings, String expectedResult, String fieldType, Object expectedFooVal, @@ -141,6 +140,7 @@ private void runGetFromTranslogWithOptions( runGetFromTranslogWithOptions( docToIndex, sourceOptions, + settings, expectedResult, fieldType, expectedFooVal, @@ -152,28 +152,30 @@ private void runGetFromTranslogWithOptions( private void runGetFromTranslogWithOptions( String docToIndex, String sourceOptions, + Settings additionalSettings, String expectedResult, String fieldTypeFoo, Object expectedFooVal, String fieldTypeBar, boolean sourceOnlyFetchCreatesInMemoryReader ) throws IOException { - IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping(Strings.format(""" - { - "properties": { - "foo": { - "type": %s, - "store": true - }, - "bar": { "type": %s } - }, - "_source": { %s } - } 
- }""", fieldTypeFoo, fieldTypeBar, sourceOptions)) - .settings(indexSettings(IndexVersion.current(), 1, 1)) - .primaryTerm(0, 1) - .build(); + + var indexSettingsBuilder = indexSettings(IndexVersion.current(), 1, 1); + if (additionalSettings != null) { + indexSettingsBuilder.put(additionalSettings); + } + IndexMetadata metadata = IndexMetadata.builder("test").putMapping(Strings.format(""" + { + "properties": { + "foo": { + "type": %s, + "store": true + }, + "bar": { "type": %s } + }, + "_source": { %s } + } + }""", fieldTypeFoo, fieldTypeBar, sourceOptions)).settings(indexSettingsBuilder).primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, EngineTestCase.randomReaderWrapper()); recoverShardFromStore(primary); LongSupplier translogInMemorySegmentCount = ((InternalEngine) primary.getEngine()).translogInMemorySegmentsCount::get; diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java index 2c3f7626e1013..97158e27b8528 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.nativeaccess.NativeAccessUtil; import org.elasticsearch.plugin.analysis.CharFilterFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.PrivilegedOperations; @@ -31,8 +32,7 @@ import java.util.Set; import static java.util.Map.entry; -import static org.elasticsearch.test.LambdaMatchers.transformedMatch; -import static org.hamcrest.Matchers.contains; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static 
org.hamcrest.Matchers.instanceOf; @@ -44,11 +44,16 @@ public class PluginsLoaderTests extends ESTestCase { private static final Logger logger = LogManager.getLogger(PluginsLoaderTests.class); + public static final String STABLE_PLUGIN_NAME = "stable-plugin"; + public static final String STABLE_PLUGIN_MODULE_NAME = "synthetic.stable.plugin"; + public static final String MODULAR_PLUGIN_NAME = "modular-plugin"; + public static final String MODULAR_PLUGIN_MODULE_NAME = "modular.plugin"; static PluginsLoader newPluginsLoader(Settings settings) { return PluginsLoader.createPluginsLoader( Set.of(), PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + Map.of(), false ); } @@ -70,64 +75,30 @@ public void testToModuleName() { public void testStablePluginLoading() throws Exception { final Path home = createTempDir(); final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); - final Path plugins = home.resolve("plugins"); - final Path plugin = plugins.resolve("stable-plugin"); - Files.createDirectories(plugin); - PluginTestUtil.writeStablePluginProperties( - plugin, - "description", - "description", - "name", - "stable-plugin", - "version", - "1.0.0", - "elasticsearch.version", - Version.CURRENT.toString(), - "java.version", - System.getProperty("java.specification.version") - ); - - Path jar = plugin.resolve("impl.jar"); - JarUtils.createJarWithEntries(jar, Map.of("p/A.class", InMemoryJavaCompiler.compile("p.A", """ - package p; - import java.util.Map; - import org.elasticsearch.plugin.analysis.CharFilterFactory; - import org.elasticsearch.plugin.NamedComponent; - import java.io.Reader; - @NamedComponent( "a_name") - public class A implements CharFilterFactory { - @Override - public Reader create(Reader reader) { - return reader; - } - } - """))); - Path namedComponentFile = plugin.resolve("named_components.json"); - Files.writeString(namedComponentFile, """ - { - 
"org.elasticsearch.plugin.analysis.CharFilterFactory": { - "a_name": "p.A" - } - } - """); + createStablePlugin(home); var pluginsLoader = newPluginsLoader(settings); try { var loadedLayers = pluginsLoader.pluginLayers().toList(); assertThat(loadedLayers, hasSize(1)); - assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo("stable-plugin")); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo(STABLE_PLUGIN_NAME)); assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isStable(), is(true)); assertThat(pluginsLoader.pluginDescriptors(), hasSize(1)); - assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo("stable-plugin")); + assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo(STABLE_PLUGIN_NAME)); assertThat(pluginsLoader.pluginDescriptors().get(0).isStable(), is(true)); var pluginClassLoader = loadedLayers.get(0).pluginClassLoader(); var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); assertThat(pluginClassLoader, instanceOf(UberModuleClassLoader.class)); assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); - assertThat(pluginModuleLayer.modules(), contains(transformedMatch(Module::getName, equalTo("synthetic.stable.plugin")))); + + var module = pluginModuleLayer.findModule(STABLE_PLUGIN_MODULE_NAME); + assertThat(module, isPresent()); + if (Runtime.version().feature() >= 22) { + assertThat(NativeAccessUtil.isNativeAccessEnabled(module.get()), is(false)); + } if (CharFilterFactory.class.getModule().isNamed() == false) { // test frameworks run with stable api classes on classpath, so we @@ -137,7 +108,37 @@ public Reader create(Reader reader) { } Class stableClass = pluginClassLoader.loadClass("p.A"); - assertThat(stableClass.getModule().getName(), equalTo("synthetic.stable.plugin")); + assertThat(stableClass.getModule().getName(), equalTo(STABLE_PLUGIN_MODULE_NAME)); + } finally { + closePluginLoaders(pluginsLoader); + } + } + + public 
void testStablePluginWithNativeAccess() throws Exception { + final Path home = createTempDir(); + final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + createStablePlugin(home); + + var pluginsLoader = PluginsLoader.createPluginsLoader( + Set.of(), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + Map.of(STABLE_PLUGIN_NAME, Set.of(STABLE_PLUGIN_MODULE_NAME)), + false + ); + try { + var loadedLayers = pluginsLoader.pluginLayers().toList(); + + assertThat(loadedLayers, hasSize(1)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo(STABLE_PLUGIN_NAME)); + + var pluginClassLoader = loadedLayers.get(0).pluginClassLoader(); + var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); + assertThat(pluginClassLoader, instanceOf(UberModuleClassLoader.class)); + assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); + + var module = pluginModuleLayer.findModule(STABLE_PLUGIN_MODULE_NAME); + assertThat(module, isPresent()); + assertThat(NativeAccessUtil.isNativeAccessEnabled(module.get()), is(true)); } finally { closePluginLoaders(pluginsLoader); } @@ -146,19 +147,77 @@ public Reader create(Reader reader) { public void testModularPluginLoading() throws Exception { final Path home = createTempDir(); final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + createModularPlugin(home); + + var pluginsLoader = newPluginsLoader(settings); + try { + var loadedLayers = pluginsLoader.pluginLayers().toList(); + + assertThat(loadedLayers, hasSize(1)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo(MODULAR_PLUGIN_NAME)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isStable(), is(false)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isModular(), is(true)); + + assertThat(pluginsLoader.pluginDescriptors(), hasSize(1)); 
+ assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo(MODULAR_PLUGIN_NAME)); + assertThat(pluginsLoader.pluginDescriptors().get(0).isModular(), is(true)); + + var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); + assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); + + var module = pluginModuleLayer.findModule(MODULAR_PLUGIN_MODULE_NAME); + assertThat(module, isPresent()); + if (Runtime.version().feature() >= 22) { + assertThat(NativeAccessUtil.isNativeAccessEnabled(module.get()), is(false)); + } + } finally { + closePluginLoaders(pluginsLoader); + } + } + + public void testModularPluginLoadingWithNativeAccess() throws Exception { + final Path home = createTempDir(); + final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + createModularPlugin(home); + + var pluginsLoader = PluginsLoader.createPluginsLoader( + Set.of(), + PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + Map.of(MODULAR_PLUGIN_NAME, Set.of(MODULAR_PLUGIN_MODULE_NAME)), + false + ); + try { + var loadedLayers = pluginsLoader.pluginLayers().toList(); + + assertThat(loadedLayers, hasSize(1)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo(MODULAR_PLUGIN_NAME)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isModular(), is(true)); + + var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); + assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); + + var module = pluginModuleLayer.findModule(MODULAR_PLUGIN_MODULE_NAME); + assertThat(module, isPresent()); + assertThat(NativeAccessUtil.isNativeAccessEnabled(module.get()), is(true)); + } finally { + closePluginLoaders(pluginsLoader); + } + } + + private static void createModularPlugin(Path home) throws IOException { final Path plugins = home.resolve("plugins"); - final Path plugin = plugins.resolve("modular-plugin"); + final Path plugin = 
plugins.resolve(MODULAR_PLUGIN_NAME); Files.createDirectories(plugin); PluginTestUtil.writePluginProperties( plugin, "description", "description", "name", - "modular-plugin", + MODULAR_PLUGIN_NAME, "classname", "p.A", "modulename", - "modular.plugin", + MODULAR_PLUGIN_MODULE_NAME, "version", "1.0.0", "elasticsearch.version", @@ -168,42 +227,25 @@ public void testModularPluginLoading() throws Exception { ); Path jar = plugin.resolve("impl.jar"); - Map sources = Map.ofEntries(entry("module-info", "module modular.plugin { exports p; }"), entry("p.A", """ - package p; - import org.elasticsearch.plugins.Plugin; + Map sources = Map.ofEntries( + entry("module-info", "module " + MODULAR_PLUGIN_MODULE_NAME + " { exports p; }"), + entry("p.A", """ + package p; + import org.elasticsearch.plugins.Plugin; - public class A extends Plugin { - } - """)); + public class A extends Plugin { + } + """) + ); // Usually org.elasticsearch.plugins.Plugin would be in the org.elasticsearch.server module. // Unfortunately, as tests run non-modular, it will be in the unnamed module, so we need to add a read for it. 
- var classToBytes = InMemoryJavaCompiler.compile(sources, "--add-reads", "modular.plugin=ALL-UNNAMED"); + var classToBytes = InMemoryJavaCompiler.compile(sources, "--add-reads", MODULAR_PLUGIN_MODULE_NAME + "=ALL-UNNAMED"); JarUtils.createJarWithEntries( jar, Map.ofEntries(entry("module-info.class", classToBytes.get("module-info")), entry("p/A.class", classToBytes.get("p.A"))) ); - - var pluginsLoader = newPluginsLoader(settings); - try { - var loadedLayers = pluginsLoader.pluginLayers().toList(); - - assertThat(loadedLayers, hasSize(1)); - assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo("modular-plugin")); - assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isStable(), is(false)); - assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isModular(), is(true)); - - assertThat(pluginsLoader.pluginDescriptors(), hasSize(1)); - assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo("modular-plugin")); - assertThat(pluginsLoader.pluginDescriptors().get(0).isModular(), is(true)); - - var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); - assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); - assertThat(pluginModuleLayer.modules(), contains(transformedMatch(Module::getName, equalTo("modular.plugin")))); - } finally { - closePluginLoaders(pluginsLoader); - } } public void testNonModularPluginLoading() throws Exception { @@ -261,6 +303,49 @@ public class A extends Plugin { } } + private static void createStablePlugin(Path home) throws IOException { + final Path plugins = home.resolve("plugins"); + final Path plugin = plugins.resolve(STABLE_PLUGIN_NAME); + Files.createDirectories(plugin); + PluginTestUtil.writeStablePluginProperties( + plugin, + "description", + "description", + "name", + STABLE_PLUGIN_NAME, + "version", + "1.0.0", + "elasticsearch.version", + Version.CURRENT.toString(), + "java.version", + System.getProperty("java.specification.version") + ); + + Path jar = 
plugin.resolve("impl.jar"); + JarUtils.createJarWithEntries(jar, Map.of("p/A.class", InMemoryJavaCompiler.compile("p.A", """ + package p; + import java.util.Map; + import org.elasticsearch.plugin.analysis.CharFilterFactory; + import org.elasticsearch.plugin.NamedComponent; + import java.io.Reader; + @NamedComponent( "a_name") + public class A implements CharFilterFactory { + @Override + public Reader create(Reader reader) { + return reader; + } + } + """))); + Path namedComponentFile = plugin.resolve("named_components.json"); + Files.writeString(namedComponentFile, """ + { + "org.elasticsearch.plugin.analysis.CharFilterFactory": { + "a_name": "p.A" + } + } + """); + } + // Closes the URLClassLoaders and UberModuleClassloaders created by the given plugin loader. // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. static void closePluginLoaders(PluginsLoader pluginsLoader) { diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 74caf2c38e309..e76994f69c01e 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -71,6 +71,7 @@ static PluginsService newPluginsService(Settings settings) { PluginsLoader.createPluginsLoader( Set.of(), PluginsLoader.loadPluginsBundles(TestEnvironment.newEnvironment(settings).pluginsFile()), + Map.of(), false ) ); diff --git a/server/src/test/java/org/elasticsearch/plugins/UberModuleClassLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/UberModuleClassLoaderTests.java index 19dcf9dcf7096..3e7f783779a51 100644 --- a/server/src/test/java/org/elasticsearch/plugins/UberModuleClassLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/UberModuleClassLoaderTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.plugins; import 
org.elasticsearch.common.Strings; +import org.elasticsearch.nativeaccess.NativeAccessUtil; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; @@ -35,12 +36,14 @@ import static java.util.Arrays.stream; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @ESTestCase.WithoutSecurityManager public class UberModuleClassLoaderTests extends ESTestCase { + private static final String MODULE_NAME = "synthetic"; private static Set loaders = new HashSet<>(); /** @@ -57,7 +60,7 @@ public void testLoadFromJar() throws Exception { assertThat(c, notNullValue()); Object instance = c.getConstructor().newInstance(); assertThat(instance.toString(), equalTo("MyClass")); - assertThat(c.getModule().getName(), equalTo("synthetic")); + assertThat(c.getModule().getName(), equalTo(MODULE_NAME)); } { @@ -87,9 +90,9 @@ public void testSingleJarFindClass() throws Exception { { try (UberModuleClassLoader loader = getLoader(jar)) { - Class c = loader.findClass("synthetic", "p.MyClass"); + Class c = loader.findClass(MODULE_NAME, "p.MyClass"); assertThat(c, notNullValue()); - c = loader.findClass("synthetic", "p.DoesNotExist"); + c = loader.findClass(MODULE_NAME, "p.DoesNotExist"); assertThat(c, nullValue()); c = loader.findClass("does-not-exist", "p.MyClass"); assertThat(c, nullValue()); @@ -126,9 +129,9 @@ public void testSingleJarFindResources() throws Exception { } { - URL location = loader.findResource("synthetic", "p/MyClass.class"); + URL location = loader.findResource(MODULE_NAME, "p/MyClass.class"); assertThat(location, notNullValue()); - location = loader.findResource("synthetic", "p/DoesNotExist.class"); + location = loader.findResource(MODULE_NAME, "p/DoesNotExist.class"); assertThat(location, nullValue()); 
location = loader.findResource("does-not-exist", "p/MyClass.class"); assertThat(location, nullValue()); @@ -161,7 +164,7 @@ public void testHideSplitPackageInParentClassloader() throws Exception { try ( URLClassLoader parent = URLClassLoader.newInstance(urls, UberModuleClassLoaderTests.class.getClassLoader()); - UberModuleClassLoader loader = UberModuleClassLoader.getInstance(parent, "synthetic", Set.of(toUrl(jar))) + UberModuleClassLoader loader = UberModuleClassLoader.getInstance(parent, MODULE_NAME, Set.of(toUrl(jar))) ) { // stable plugin loader gives us the good class... Class c = loader.loadClass("p.MyClassInPackageP"); @@ -197,7 +200,7 @@ public void testNoParentFirstSearch() throws Exception { try ( URLClassLoader parent = URLClassLoader.newInstance(urls, UberModuleClassLoaderTests.class.getClassLoader()); - UberModuleClassLoader loader = UberModuleClassLoader.getInstance(parent, "synthetic", Set.of(toUrl(jar))) + UberModuleClassLoader loader = UberModuleClassLoader.getInstance(parent, MODULE_NAME, Set.of(toUrl(jar))) ) { // stable plugin loader gives us the good class... 
Class c = loader.loadClass("p.MyClass"); @@ -309,9 +312,10 @@ public String toString() { UberModuleClassLoader denyListLoader = UberModuleClassLoader.getInstance( UberModuleClassLoaderTests.class.getClassLoader(), ModuleLayer.boot(), - "synthetic", + MODULE_NAME, Set.of(toUrl(jar)), - Set.of("java.sql", "java.sql.rowset") // if present, java.sql.rowset requires java.sql transitively + Set.of("java.sql", "java.sql.rowset"), // if present, java.sql.rowset requires java.sql transitively + Set.of() ) ) { Class denyListed = denyListLoader.loadClass("p.MyImportingClass"); @@ -401,6 +405,37 @@ public void testServiceLoadingWithRedundantDeclarations() throws Exception { } } + public void testNativeAccessIsEnabled() throws Exception { + Path topLevelDir = createTempDir(getTestName()); + Path jar = topLevelDir.resolve("my-jar.jar"); + createMinimalJar(jar, "p.MyClass"); + + try ( + UberModuleClassLoader loader = UberModuleClassLoader.getInstance( + UberModuleClassLoaderTests.class.getClassLoader(), + ModuleLayer.boot(), + MODULE_NAME, + Set.of(UberModuleClassLoaderTests.pathToUrlUnchecked(jar)), + Set.of(), + Set.of(MODULE_NAME) + ) + ) { + { + Class c = loader.loadClass("p.MyClass"); + assertThat(c, notNullValue()); + Object instance = c.getConstructor().newInstance(); + assertThat(instance.toString(), equalTo("MyClass")); + assertThat(c.getModule().getName(), equalTo(MODULE_NAME)); + assertThat(NativeAccessUtil.isNativeAccessEnabled(c.getModule()), is(true)); + } + + { + ClassNotFoundException e = expectThrows(ClassNotFoundException.class, () -> loader.loadClass("p.DoesNotExist")); + assertThat(e.getMessage(), equalTo("p.DoesNotExist")); + } + } + } + private static void createServiceTestSingleJar(Path jar, boolean modularize, boolean addMetaInfService) throws IOException { String serviceInterface = """ package p; @@ -473,7 +508,7 @@ public void testServiceLoadingWithOptionalDependencies() throws Exception { try (UberModuleClassLoader loader = getServiceTestLoader(true)) { 
// check module descriptor - ModuleDescriptor synthetic = loader.getLayer().findModule("synthetic").orElseThrow().getDescriptor(); + ModuleDescriptor synthetic = loader.getLayer().findModule(MODULE_NAME).orElseThrow().getDescriptor(); assertThat( synthetic.uses(), @@ -526,7 +561,7 @@ public void testServiceLoadingWithoutOptionalDependencies() throws Exception { try (UberModuleClassLoader loader = getServiceTestLoader(false)) { // check module descriptor - ModuleDescriptor synthetic = loader.getLayer().findModule("synthetic").orElseThrow().getDescriptor(); + ModuleDescriptor synthetic = loader.getLayer().findModule(MODULE_NAME).orElseThrow().getDescriptor(); assertThat(synthetic.uses(), equalTo(Set.of("p.required.LetterService", "q.jar.one.NumberService", "q.jar.two.FooBarService"))); // the descriptor model uses a list ordering that we don't guarantee, so we convert the provider list to maps and sets Map> serviceProviders = synthetic.provides() @@ -615,8 +650,9 @@ private static UberModuleClassLoader getServiceTestLoader(boolean includeOptiona return UberModuleClassLoader.getInstance( parentLayer.findLoader(includeOptionalDeps ? 
"p.optional" : "p.required"), parentLayer, - "synthetic", + MODULE_NAME, jarPaths.stream().map(UberModuleClassLoaderTests::pathToUrlUnchecked).collect(Collectors.toSet()), + Set.of(), Set.of() ); } @@ -880,7 +916,7 @@ private static UberModuleClassLoader getLoader(Path jar) { private static UberModuleClassLoader getLoader(List jars) { return UberModuleClassLoader.getInstance( UberModuleClassLoaderTests.class.getClassLoader(), - "synthetic", + MODULE_NAME, jars.stream().map(UberModuleClassLoaderTests::pathToUrlUnchecked).collect(Collectors.toSet()) ); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 2b082f2f8b022..25a71d04b321d 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -317,9 +317,7 @@ public void testNullSource() { assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); - assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceAsString(), nullValue()); - assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); assertThat(searchHit.getSourceAsString(), nullValue()); } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index af430b2d18c51..d260d66157651 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.tasks.TaskId; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -38,6 +39,7 @@ public class TransportHandshakerTests extends ESTestCase { private TestThreadPool threadPool; private TransportHandshaker.HandshakeRequestSender requestSender; + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) private static final TransportVersion HANDSHAKE_REQUEST_VERSION = TransportHandshaker.V8_HANDSHAKE_VERSION; @Override @@ -71,10 +73,15 @@ public void testHandshakeRequestAndResponse() throws IOException { assertFalse(versionFuture.isDone()); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current()); + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + TransportVersion.current(), + randomIdentifier() + ); BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + bytesStreamOutput.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(bytesStreamOutput); StreamInput input = bytesStreamOutput.bytes().streamInput(); + input.setTransportVersion(HANDSHAKE_REQUEST_VERSION); final PlainActionFuture responseFuture = new PlainActionFuture<>(); final TestTransportChannel channel = new TestTransportChannel(responseFuture); handshaker.handleHandshake(channel, reqId, input); @@ -95,7 +102,7 @@ public void testHandshakeResponseFromOlderNode() throws Exception { assertFalse(versionFuture.isDone()); final var remoteVersion = TransportVersionUtils.randomCompatibleVersion(random()); - handler.handleResponse(new TransportHandshaker.HandshakeResponse(remoteVersion)); + handler.handleResponse(new TransportHandshaker.HandshakeResponse(remoteVersion, randomIdentifier())); assertTrue(versionFuture.isDone()); assertEquals(remoteVersion, versionFuture.result()); @@ -110,7 +117,10 @@ public void testHandshakeResponseFromNewerNode() throws Exception { assertFalse(versionFuture.isDone()); handler.handleResponse( - new 
TransportHandshaker.HandshakeResponse(TransportVersion.fromId(TransportVersion.current().id() + between(0, 10))) + new TransportHandshaker.HandshakeResponse( + TransportVersion.fromId(TransportVersion.current().id() + between(0, 10)), + randomIdentifier() + ) ); assertTrue(versionFuture.isDone()); @@ -123,8 +133,12 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException verify(requestSender).sendRequest(node, channel, reqId, HANDSHAKE_REQUEST_VERSION); - TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest(TransportVersion.current()); + TransportHandshaker.HandshakeRequest handshakeRequest = new TransportHandshaker.HandshakeRequest( + TransportVersion.current(), + randomIdentifier() + ); BytesStreamOutput currentHandshakeBytes = new BytesStreamOutput(); + currentHandshakeBytes.setTransportVersion(HANDSHAKE_REQUEST_VERSION); handshakeRequest.writeTo(currentHandshakeBytes); BytesStreamOutput lengthCheckingHandshake = new BytesStreamOutput(); @@ -149,7 +163,118 @@ public void testHandshakeRequestFutureVersionsCompatibility() throws IOException TransportHandshaker.HandshakeResponse response = (TransportHandshaker.HandshakeResponse) responseFuture.actionGet(); - assertEquals(TransportVersion.current(), response.getResponseVersion()); + assertEquals(TransportVersion.current(), response.getTransportVersion()); + } + + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 + public void testReadV7HandshakeRequest() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var requestPayloadStreamOutput = new BytesStreamOutput(); + requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + requestPayloadStreamOutput.writeVInt(transportVersion.id()); + + final var requestBytesStreamOutput = new BytesStreamOutput(); + 
requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); + requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); + + final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); + requestBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); + + assertEquals(transportVersion, handshakeRequest.transportVersion); + assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion); + } + + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // v7 handshakes are not supported in v9 + public void testReadV7HandshakeResponse() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var responseBytesStreamOutput = new BytesStreamOutput(); + responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + responseBytesStreamOutput.writeVInt(transportVersion.id()); + + final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); + responseBytesStream.setTransportVersion(TransportHandshaker.V7_HANDSHAKE_VERSION); + final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); + + assertEquals(transportVersion, handshakeResponse.getTransportVersion()); + assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion()); + } + + public void testReadV8HandshakeRequest() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var requestPayloadStreamOutput = new BytesStreamOutput(); + requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + requestPayloadStreamOutput.writeVInt(transportVersion.id()); + + final var requestBytesStreamOutput = new 
BytesStreamOutput(); + requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); + requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); + + final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); + requestBytesStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); + + assertEquals(transportVersion, handshakeRequest.transportVersion); + assertEquals(transportVersion.toReleaseVersion(), handshakeRequest.releaseVersion); + } + + public void testReadV8HandshakeResponse() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + + final var responseBytesStreamOutput = new BytesStreamOutput(); + responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + responseBytesStreamOutput.writeVInt(transportVersion.id()); + + final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); + responseBytesStream.setTransportVersion(TransportHandshaker.V8_HANDSHAKE_VERSION); + final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); + + assertEquals(transportVersion, handshakeResponse.getTransportVersion()); + assertEquals(transportVersion.toReleaseVersion(), handshakeResponse.getReleaseVersion()); + } + + public void testReadV9HandshakeRequest() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + final var releaseVersion = randomIdentifier(); + + final var requestPayloadStreamOutput = new BytesStreamOutput(); + requestPayloadStreamOutput.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + requestPayloadStreamOutput.writeVInt(transportVersion.id()); + requestPayloadStreamOutput.writeString(releaseVersion); + + final var 
requestBytesStreamOutput = new BytesStreamOutput(); + requestBytesStreamOutput.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + TaskId.EMPTY_TASK_ID.writeTo(requestBytesStreamOutput); + requestBytesStreamOutput.writeBytesReference(requestPayloadStreamOutput.bytes()); + + final var requestBytesStream = requestBytesStreamOutput.bytes().streamInput(); + requestBytesStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + final var handshakeRequest = new TransportHandshaker.HandshakeRequest(requestBytesStream); + + assertEquals(transportVersion, handshakeRequest.transportVersion); + assertEquals(releaseVersion, handshakeRequest.releaseVersion); + } + + public void testReadV9HandshakeResponse() throws IOException { + final var transportVersion = TransportVersionUtils.randomCompatibleVersion(random()); + final var releaseVersion = randomIdentifier(); + + final var responseBytesStreamOutput = new BytesStreamOutput(); + responseBytesStreamOutput.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + responseBytesStreamOutput.writeVInt(transportVersion.id()); + responseBytesStreamOutput.writeString(releaseVersion); + + final var responseBytesStream = responseBytesStreamOutput.bytes().streamInput(); + responseBytesStream.setTransportVersion(TransportHandshaker.V9_HANDSHAKE_VERSION); + final var handshakeResponse = new TransportHandshaker.HandshakeResponse(responseBytesStream); + + assertEquals(transportVersion, handshakeResponse.getTransportVersion()); + assertEquals(releaseVersion, handshakeResponse.getReleaseVersion()); } public void testHandshakeError() throws IOException { diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 4af8681bb939a..2e68c094492fe 100644 --- 
a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -86,7 +86,7 @@ public void skipOnAborted() { public void testSortByManyLongsSuccess() throws IOException { initManyLongs(); Response response = sortByManyLongs(500); - Map map = responseAsMap(response); + Map map = responseAsMap(response); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "b").entry("type", "long")); ListMatcher values = matchesList(); @@ -95,8 +95,7 @@ public void testSortByManyLongsSuccess() throws IOException { values = values.item(List.of(0, b)); } } - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } /** @@ -236,11 +235,10 @@ private StringBuilder makeSortByManyLongs(int count) { public void testGroupOnSomeLongs() throws IOException { initManyLongs(); Response resp = groupOnManyLongs(200); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(a)").entry("type", "long")); ListMatcher values = matchesList().item(List.of(9)); - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } /** @@ -249,11 +247,10 @@ public void testGroupOnSomeLongs() throws IOException { public void testGroupOnManyLongs() throws IOException { initManyLongs(); Response resp = groupOnManyLongs(5000); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(a)").entry("type", "long")); ListMatcher 
values = matchesList().item(List.of(9)); - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } private Response groupOnManyLongs(int count) throws IOException { @@ -279,12 +276,11 @@ private StringBuilder makeManyLongs(int count) { public void testSmallConcat() throws IOException { initSingleDocIndex(); Response resp = concat(2); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "str").entry("type", "keyword")); ListMatcher values = matchesList().item(List.of(1, "1".repeat(100))); - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, values); } public void testHugeConcat() throws IOException { @@ -465,7 +461,7 @@ private void assertManyStrings(Response resp, int strings) throws IOException { public void testManyEval() throws IOException { initManyLongs(); Response resp = manyEval(1); - Map map = responseAsMap(resp); + Map map = responseAsMap(resp); ListMatcher columns = matchesList(); columns = columns.item(matchesMap().entry("name", "a").entry("type", "long")); columns = columns.item(matchesMap().entry("name", "b").entry("type", "long")); @@ -475,8 +471,7 @@ public void testManyEval() throws IOException { for (int i = 0; i < 20; i++) { columns = columns.item(matchesMap().entry("name", "i0" + i).entry("type", "long")); } - MapMatcher mapMatcher = matchesMap(); - assertMap(map, mapMatcher.entry("columns", columns).entry("values", hasSize(10_000)).entry("took", greaterThanOrEqualTo(0))); + assertResultMap(map, columns, hasSize(10_000)); } public void testTooManyEval() throws IOException { diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index bb48b0031483c..a62af5729a096 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1077,12 +1077,12 @@ public SyntheticSourceExample( this(b -> b.value(inputValue), b -> b.value(result), b -> b.value(blockLoaderResults), mapping); } - private void buildInput(XContentBuilder b) throws IOException { + public void buildInput(XContentBuilder b) throws IOException { b.field("field"); inputValue.accept(b); } - private void buildInputArray(XContentBuilder b, int elementCount) throws IOException { + public void buildInputArray(XContentBuilder b, int elementCount) throws IOException { b.startArray("field"); for (int i = 0; i < elementCount; i++) { inputValue.accept(b); @@ -1369,7 +1369,7 @@ public final void testSyntheticEmptyList() throws IOException { assertThat(syntheticSource(mapper, b -> b.startArray("field").endArray()), equalTo(expected)); } - private boolean shouldUseIgnoreMalformed() { + protected boolean shouldUseIgnoreMalformed() { // 5% of test runs use ignore_malformed return supportsIgnoreMalformed() && randomDouble() <= 0.05; } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index b0edbb829df2a..330058b16a811 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -889,7 +889,6 @@ public static SearchHit searchHitFromMap(Map values) { shardTarget, index, clusterAlias, - null, get(SearchHit.Fields.INNER_HITS, values, null), get(SearchHit.DOCUMENT_FIELDS, values, Collections.emptyMap()), get(SearchHit.METADATA_FIELDS, 
values, Collections.emptyMap()), diff --git a/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java b/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java index 4a5f280c10a99..ed5697586dacf 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java +++ b/test/framework/src/main/java/org/elasticsearch/search/retriever/TestCompoundRetrieverBuilder.java @@ -38,7 +38,7 @@ protected TestCompoundRetrieverBuilder clone(List newChildRetri } @Override - protected RankDoc[] combineInnerRetrieverResults(List rankResults) { + protected RankDoc[] combineInnerRetrieverResults(List rankResults, boolean explain) { return new RankDoc[0]; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index d239c6453a7fe..be30dbe9823d4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -636,7 +636,8 @@ QueryRewriteContext createQueryRewriteContext() { scriptService, createMockResolvedIndices(), null, - createMockQueryRewriteInterceptor() + createMockQueryRewriteInterceptor(), + false ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index b23ad1e9c548f..2647e21d34bc5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -70,12 +70,12 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.index.mapper.SourceFieldMapper; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MapMatcher; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -84,6 +84,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.After; import org.junit.AfterClass; @@ -133,12 +134,15 @@ import static org.elasticsearch.client.RestClient.IGNORE_RESPONSE_CODES_PARAM; import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.test.rest.TestFeatureService.ALL_FEATURES; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.notNullValue; @@ -1860,10 +1864,7 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (isSyntheticSourceConfiguredInMapping(mapping) - && SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) 
{ - request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); - } + } final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return TestResponseParsers.parseCreateIndexResponse(parser); @@ -1907,52 +1908,16 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } - @SuppressWarnings("unchecked") - protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { - if (mapping == null) { - return false; - } - var mappings = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - mapping.trim().startsWith("{") ? mapping : '{' + mapping + '}', - false - ); - if (mappings.containsKey("_doc")) { - mappings = (Map) mappings.get("_doc"); - } - Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); - if (sourceMapper == null) { - return false; - } - return sourceMapper.get("mode") != null; - } - - @SuppressWarnings("unchecked") - protected static boolean isSyntheticSourceConfiguredInTemplate(String template) { - if (template == null) { - return false; - } - var values = XContentHelper.convertToMap(JsonXContent.jsonXContent, template, false); - for (Object value : values.values()) { - Map mappings = (Map) ((Map) value).get("mappings"); - if (mappings == null) { - continue; - } - Map sourceMapper = (Map) mappings.get(SourceFieldMapper.NAME); - if (sourceMapper == null) { - continue; - } - Object mode = sourceMapper.get("mode"); - if (mode != null) { - return true; - } - } - return false; + protected static Map getIndexSettings(String index) throws IOException { + return getIndexSettings(index, false); } - protected static Map getIndexSettings(String index) throws IOException { + protected static Map getIndexSettings(String index, boolean includeDefaults) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); + if (includeDefaults) { + 
request.addParameter("include_defaults", "true"); + } Response response = client().performRequest(request); try (InputStream is = response.getEntity().getContent()) { return XContentHelper.convertToMap( @@ -2570,4 +2535,46 @@ public static Request newXContentRequest(HttpMethod method, String endpoint, ToX addXContentBody(request, body); return request; } + + protected static MapMatcher getResultMatcher(boolean includeMetadata, boolean includePartial) { + MapMatcher mapMatcher = matchesMap(); + if (includeMetadata) { + mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); + } + // Older version may not have is_partial + if (includePartial) { + mapMatcher = mapMatcher.entry("is_partial", false); + } + return mapMatcher; + } + + /** + * Create empty result matcher from result, taking into account all metadata items. + */ + protected static MapMatcher getResultMatcher(Map result) { + return getResultMatcher(result.containsKey("took"), result.containsKey("is_partial")); + } + + /** + * Match result columns and values, with default matchers for metadata. + */ + protected static void assertResultMap(Map result, Matcher columnMatcher, Matcher valuesMatcher) { + assertMap(result, getResultMatcher(result).entry("columns", columnMatcher).entry("values", valuesMatcher)); + } + + protected static void assertResultMap(Map result, Object columnMatcher, Object valuesMatcher) { + assertMap(result, getResultMatcher(result).entry("columns", columnMatcher).entry("values", valuesMatcher)); + } + + /** + * Match result columns and values, with default matchers for metadata. 
+ */ + protected static void assertResultMap( + Map result, + MapMatcher mapMatcher, + Matcher columnMatcher, + Matcher valuesMatcher + ) { + assertMap(result, mapMatcher.entry("columns", columnMatcher).entry("values", valuesMatcher)); + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java index 53758c165a3c7..05e8a93ad99ed 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.test.index; -import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; @@ -19,26 +18,19 @@ import java.util.List; import java.util.Set; +import static org.hamcrest.Matchers.equalTo; + public class IndexVersionUtilsTests extends ESTestCase { /** * Tests that {@link IndexVersions#MINIMUM_COMPATIBLE} and {@link IndexVersionUtils#allReleasedVersions()} - * agree with the list of index compatible versions we build in gradle. + * agree on the minimum version that should be tested. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98054") - public void testGradleVersionsMatchVersionUtils() { + public void testIndexCompatibleVersionMatches() { VersionsFromProperty indexCompatible = new VersionsFromProperty("tests.gradle_index_compat_versions"); - List released = IndexVersionUtils.allReleasedVersions() - .stream() - /* Java lists all versions from the 5.x series onwards, but we only want to consider - * ones that we're supposed to be compatible with. 
*/ - .filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE)) - .toList(); - List releasedIndexCompatible = released.stream() - .filter(v -> IndexVersion.current().equals(v) == false) - .map(Object::toString) - .toList(); - assertEquals(releasedIndexCompatible, indexCompatible.released); + String minIndexVersion = IndexVersions.MINIMUM_COMPATIBLE.toReleaseVersion(); + String lowestCompatibleVersion = indexCompatible.released.get(0); + assertThat(lowestCompatibleVersion, equalTo(minIndexVersion)); } /** diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 5a212e5b1ec58..79ceec5fdf04d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; @@ -485,8 +484,6 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } - unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); - if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java new file mode 100644 index 0000000000000..7113cbca279d3 --- /dev/null +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncStopRequest.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.core.async; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request for TransportEsqlAsyncStopAction action. + */ +public class AsyncStopRequest extends ActionRequest { + private final String id; + + /** + * Creates a new request + * + * @param id The id of the search progress request. + */ + public AsyncStopRequest(String id) { + this.id = id; + } + + public AsyncStopRequest(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + /** + * Returns the id of the async search. 
+ */ + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AsyncStopRequest request = (AsyncStopRequest) o; + return Objects.equals(id, request.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java index 81ab54fc2db5f..7555db8fc85e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlAsyncActionNames.java @@ -12,4 +12,5 @@ */ public class EsqlAsyncActionNames { public static final String ESQL_ASYNC_GET_RESULT_ACTION_NAME = "indices:data/read/esql/async/get"; + public static final String ESQL_ASYNC_STOP_ACTION_NAME = "indices:data/read/esql/async/stop"; } diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java new file mode 100644 index 0000000000000..230e5e4dd3c6e --- /dev/null +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichProcessorMaxMatchesIT.java @@ -0,0 +1,191 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.enrich; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; + +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.ingest.IngestPipelineTestUtils.jsonSimulatePipelineRequest; +import static org.elasticsearch.xpack.enrich.AbstractEnrichTestCase.createSourceIndices; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.nullValue; + +public class EnrichProcessorMaxMatchesIT extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(LocalStateEnrich.class, ReindexPlugin.class, IngestCommonPlugin.class); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder() + // TODO Change this to run with security enabled + // https://github.com/elastic/elasticsearch/issues/75940 + .put(XPackSettings.SECURITY_ENABLED.getKey(), false) + .build(); + } + + public void testEnrichCacheValuesAndMaxMatches() { + // this test is meant to be much less ignorable than a mere comment in the 
code, since the behavior here is tricky. + + // there's an interesting edge case where two processors could be using the same policy and search, etc, + // but that they have a different number of max_matches -- if we're not careful about how we implement caching, + // then we could miss that edge case and return the wrong results from the cache. + + // Ensure enrich cache is empty + var statsRequest = new EnrichStatsAction.Request(TEST_REQUEST_TIMEOUT); + var statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); + assertThat(statsResponse.getCacheStats().size(), equalTo(1)); + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).misses(), equalTo(0L)); + assertThat(statsResponse.getCacheStats().get(0).hits(), equalTo(0L)); + + String policyName = "kv"; + String sourceIndexName = "kv"; + + var enrichPolicy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(sourceIndexName), "key", List.of("value")); + + // Create source index and add two documents: + createSourceIndices(client(), enrichPolicy); + { + IndexRequest indexRequest = new IndexRequest(sourceIndexName); + indexRequest.create(true); + indexRequest.source(""" + { + "key": "k1", + "value": "v1" + } + """, XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + } + { + IndexRequest indexRequest = new IndexRequest(sourceIndexName); + indexRequest.create(true); + indexRequest.source(""" + { + "key": "k1", + "value": "v2" + } + """, XContentType.JSON); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + } + + // Store policy and execute it: + var putPolicyRequest = new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName, enrichPolicy); + client().execute(PutEnrichPolicyAction.INSTANCE, putPolicyRequest).actionGet(); + var executePolicyRequest = new 
ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policyName); + client().execute(ExecuteEnrichPolicyAction.INSTANCE, executePolicyRequest).actionGet(); + + { + // run a single enrich processor to fill the cache, note that the default max_matches is 1 (so it's not given explicitly here) + var simulatePipelineRequest = jsonSimulatePipelineRequest(""" + { + "pipeline": { + "processors" : [ + { + "enrich": { + "policy_name": "kv", + "field": "key", + "target_field": "result" + } + } + ] + }, + "docs": [ + { + "_source": { + "key": "k1" + } + } + ] + } + """); + var response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); + var result = (SimulateDocumentBaseResult) response.getResults().get(0); + assertThat(result.getFailure(), nullValue()); + // it's not actually important in this specific test whether the result is v1 or v2 + assertThat(result.getIngestDocument().getFieldValue("result.value", String.class), containsString("v")); + } + + { + // run two enrich processors with different max_matches, and see if we still get the right behavior + var simulatePipelineRequest = jsonSimulatePipelineRequest(""" + { + "pipeline": { + "processors" : [ + { + "enrich": { + "policy_name": "kv", + "field": "key", + "target_field": "result" + } + }, + { + "enrich": { + "policy_name": "kv", + "field": "key", + "target_field": "results", + "max_matches": 8 + } + } + ] + }, + "docs": [ + { + "_source": { + "key": "k1" + } + } + ] + } + """); + var response = clusterAdmin().simulatePipeline(simulatePipelineRequest).actionGet(); + var result = (SimulateDocumentBaseResult) response.getResults().get(0); + assertThat(result.getFailure(), nullValue()); + // it's not actually important in this specific test whether the result is v1 or v2 + assertThat(result.getIngestDocument().getFieldValue("result.value", String.class), containsString("v")); + + // this is the important part of the test -- did the max_matches=1 case pollute the cache for the max_matches=8 case? 
+ @SuppressWarnings("unchecked") + List> results = (List>) result.getIngestDocument().getSource().get("results"); + List values = results.stream().map(m -> m.get("value")).toList(); + // if these assertions fail, it probably means you were fussing about with the EnrichCache.CacheKey and tried removing + // the max_matches accounting from it + assertThat(values, containsInAnyOrder("v1", "v2")); + assertThat(values, hasSize(2)); + } + + statsResponse = client().execute(EnrichStatsAction.INSTANCE, statsRequest).actionGet(); + assertThat(statsResponse.getCacheStats().size(), equalTo(1)); + // there are two items in the cache, the single result from max_matches 1 (implied), and the multi-result from max_matches 8 + assertThat(statsResponse.getCacheStats().get(0).count(), equalTo(2L)); + } + +} diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java index ddcad949b6a79..c2bcc67184958 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java @@ -20,22 +20,24 @@ import java.util.List; import java.util.Map; import java.util.function.BiConsumer; +import java.util.function.Supplier; public abstract class AbstractEnrichProcessor extends AbstractProcessor { private final String policyName; - private final BiConsumer>, Exception>> searchRunner; + private final EnrichProcessorFactory.SearchRunner searchRunner; private final TemplateScript.Factory field; private final TemplateScript.Factory targetField; private final boolean ignoreMissing; private final boolean overrideEnabled; protected final String matchField; protected final int maxMatches; + private final String indexAlias; protected AbstractEnrichProcessor( String tag, String description, - BiConsumer>, Exception>> searchRunner, + 
EnrichProcessorFactory.SearchRunner searchRunner, String policyName, TemplateScript.Factory field, TemplateScript.Factory targetField, @@ -53,6 +55,8 @@ protected AbstractEnrichProcessor( this.overrideEnabled = overrideEnabled; this.matchField = matchField; this.maxMatches = maxMatches; + // note: since the policyName determines the indexAlias, we can calculate this once + this.indexAlias = EnrichPolicy.getBaseName(policyName); } public abstract QueryBuilder getQueryBuilder(Object fieldValue); @@ -68,20 +72,23 @@ public void execute(IngestDocument ingestDocument, BiConsumer { + Supplier searchRequestSupplier = () -> { + QueryBuilder queryBuilder = getQueryBuilder(value); + ConstantScoreQueryBuilder constantScore = new ConstantScoreQueryBuilder(queryBuilder); + SearchSourceBuilder searchBuilder = new SearchSourceBuilder(); + searchBuilder.from(0); + searchBuilder.size(maxMatches); + searchBuilder.trackScores(false); + searchBuilder.fetchSource(true); + searchBuilder.query(constantScore); + SearchRequest req = new SearchRequest(); + req.indices(indexAlias); + req.preference(Preference.LOCAL.type()); + req.source(searchBuilder); + return req; + }; + + searchRunner.accept(value, maxMatches, searchRequestSupplier, (searchHits, e) -> { if (e != null) { handler.accept(null, e); return; diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java index 400d9f0cc84b7..d11ca41b3fbaa 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichCache.java @@ -8,16 +8,12 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexAbstraction; -import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; @@ -26,9 +22,8 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; -import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.LongSupplier; import java.util.function.ToLongBiFunction; @@ -36,28 +31,24 @@ * A simple cache for enrich that uses {@link Cache}. There is one instance of this cache and * multiple enrich processors with different policies will use this cache. *

- * The key of the cache is based on the search request and the enrich index that will be used. - * Search requests that enrich generates target the alias for an enrich policy, this class - * resolves the alias to the actual enrich index and uses that for the cache key. This way - * no stale entries will be returned if a policy execution happens and a new enrich index is created. - *

* There is no cleanup mechanism of stale entries in case a new enrich index is created * as part of a policy execution. This shouldn't be needed as cache entries for prior enrich * indices will be eventually evicted, because these entries will not end up being used. The * latest enrich index name will be used as cache key after an enrich policy execution. - * (Also a cleanup mechanism also wouldn't be straightforward to implement, + * (Also a cleanup mechanism wouldn't be straightforward to implement, * since there is no easy check to see that an enrich index used as cache key no longer is the - * current enrich index the enrich alias of an policy refers to. It would require checking + * current enrich index that the enrich alias of a policy refers to. It would require checking * all cached entries on each cluster state update) */ public final class EnrichCache { + private static final CacheValue EMPTY_CACHE_VALUE = new CacheValue(List.of(), CacheKey.CACHE_KEY_SIZE); + private final Cache cache; private final LongSupplier relativeNanoTimeProvider; private final AtomicLong hitsTimeInNanos = new AtomicLong(0); private final AtomicLong missesTimeInNanos = new AtomicLong(0); private final AtomicLong sizeInBytes = new AtomicLong(0); - private volatile Metadata metadata; EnrichCache(long maxSize) { this(maxSize, System::nanoTime); @@ -89,30 +80,36 @@ private Cache createCache(long maxWeight, ToLongBiFunction } /** - * This method notifies the given listener of the value in this cache for the given searchRequest. If there is no value in the cache - * for the searchRequest, then the new cache value is computed using searchResponseFetcher. - * @param searchRequest The key for the cache request + * This method notifies the given listener of the value in this cache for the given search parameters. If there is no value in the cache + * for these search parameters, then the new cache value is computed using searchResponseFetcher. 
+ * + * @param enrichIndex The enrich index from which the results will be retrieved + * @param lookupValue The value that will be used in the search + * @param maxMatches The max number of matches that the search will return * @param searchResponseFetcher The function used to compute the value to be put in the cache, if there is no value in the cache already * @param listener A listener to be notified of the value in the cache */ public void computeIfAbsent( - SearchRequest searchRequest, - BiConsumer> searchResponseFetcher, + String enrichIndex, + Object lookupValue, + int maxMatches, + Consumer> searchResponseFetcher, ActionListener>> listener ) { // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. long cacheStart = relativeNanoTimeProvider.getAsLong(); - List> response = get(searchRequest); + var cacheKey = new CacheKey(enrichIndex, lookupValue, maxMatches); + List> response = get(cacheKey); long cacheRequestTime = relativeNanoTimeProvider.getAsLong() - cacheStart; if (response != null) { hitsTimeInNanos.addAndGet(cacheRequestTime); listener.onResponse(response); } else { final long retrieveStart = relativeNanoTimeProvider.getAsLong(); - searchResponseFetcher.accept(searchRequest, ActionListener.wrap(resp -> { - CacheValue value = toCacheValue(resp); - put(searchRequest, value); - List> copy = deepCopy(value.hits, false); + searchResponseFetcher.accept(ActionListener.wrap(resp -> { + CacheValue cacheValue = toCacheValue(resp); + put(cacheKey, cacheValue); + List> copy = deepCopy(cacheValue.hits, false); long databaseQueryAndCachePutTime = relativeNanoTimeProvider.getAsLong() - retrieveStart; missesTimeInNanos.addAndGet(cacheRequestTime + databaseQueryAndCachePutTime); listener.onResponse(copy); @@ -121,10 +118,7 @@ public void computeIfAbsent( } // non-private for unit testing only - List> get(SearchRequest searchRequest) { - String enrichIndex = getEnrichIndexKey(searchRequest); - CacheKey 
cacheKey = new CacheKey(enrichIndex, searchRequest); - + List> get(CacheKey cacheKey) { CacheValue response = cache.get(cacheKey); if (response != null) { return deepCopy(response.hits, false); @@ -134,18 +128,11 @@ public void computeIfAbsent( } // non-private for unit testing only - void put(SearchRequest searchRequest, CacheValue cacheValue) { - String enrichIndex = getEnrichIndexKey(searchRequest); - CacheKey cacheKey = new CacheKey(enrichIndex, searchRequest); - + void put(CacheKey cacheKey, CacheValue cacheValue) { cache.put(cacheKey, cacheValue); sizeInBytes.addAndGet(cacheValue.sizeInBytes); } - void setMetadata(Metadata metadata) { - this.metadata = metadata; - } - public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { Cache.CacheStats cacheStats = cache.stats(); return new EnrichStatsAction.Response.CacheStats( @@ -160,21 +147,19 @@ public EnrichStatsAction.Response.CacheStats getStats(String localNodeId) { ); } - private String getEnrichIndexKey(SearchRequest searchRequest) { - String alias = searchRequest.indices()[0]; - IndexAbstraction ia = metadata.getIndicesLookup().get(alias); - if (ia == null) { - throw new IndexNotFoundException("no generated enrich index [" + alias + "]"); - } - return ia.getIndices().get(0).getName(); - } - static CacheValue toCacheValue(SearchResponse response) { + if (response.getHits().getHits().length == 0) { + return EMPTY_CACHE_VALUE; + } List> result = new ArrayList<>(response.getHits().getHits().length); - long size = 0; + // Include the size of the cache key. + long size = CacheKey.CACHE_KEY_SIZE; for (SearchHit hit : response.getHits()) { - result.add(deepCopy(hit.getSourceAsMap(), true)); + // There is a cost of decompressing source here plus caching it. + // We do it first so we don't decompress it twice. size += hit.getSourceRef() != null ? hit.getSourceRef().ramBytesUsed() : 0; + // Do we need deep copy here, we are creating a modifiable map already? 
+ result.add(deepCopy(hit.getSourceAsMap(), true)); } return new CacheValue(Collections.unmodifiableList(result), size); } @@ -206,28 +191,26 @@ private static Object innerDeepCopy(Object value, boolean unmodifiable) { } } - private static class CacheKey { - - final String enrichIndex; - final SearchRequest searchRequest; - - private CacheKey(String enrichIndex, SearchRequest searchRequest) { - this.enrichIndex = enrichIndex; - this.searchRequest = searchRequest; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CacheKey cacheKey = (CacheKey) o; - return enrichIndex.equals(cacheKey.enrichIndex) && searchRequest.equals(cacheKey.searchRequest); - } - - @Override - public int hashCode() { - return Objects.hash(enrichIndex, searchRequest); - } + /** + * The cache key consists of the (variable) parameters that are used to construct a search request for the enrich lookup. We define a + * custom record to group these fields to avoid constructing and storing the much larger + * {@link org.elasticsearch.action.search.SearchRequest}. + * + * @param enrichIndex The enrich index (i.e. not the alias, but the concrete index that the alias points to) + * @param lookupValue The value that is used to find matches in the enrich index + * @param maxMatches The max number of matches that the enrich lookup should return. This changes the size of the search response and + * should thus be included in the cache key + */ + // Visibility for testing + record CacheKey(String enrichIndex, Object lookupValue, int maxMatches) { + /** + * In reality, the size in bytes of the cache key is a function of the {@link CacheKey#lookupValue} field plus some constant for + * the object itself, the string reference for the enrich index (but not the string itself because it's taken from the metadata), + * and the integer for the max number of matches. 
However, by defining a static cache key size, we can make the + * {@link EnrichCache#EMPTY_CACHE_VALUE} static as well, which allows us to avoid having to instantiate new cache values for + * empty results and thus save some heap space. + */ + private static final long CACHE_KEY_SIZE = 256L; } // Visibility for testing diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java index 9890a96aae820..0c1ad73c96c26 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactory.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.Processor; import org.elasticsearch.script.ScriptService; @@ -29,6 +30,7 @@ import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ENRICH_ORIGIN; @@ -50,12 +52,12 @@ final class EnrichProcessorFactory implements Processor.Factory, Consumer processorFactories, String tag, String description, Map config) throws Exception { - String policyName = ConfigurationUtils.readStringProperty(TYPE, tag, config, "policy_name"); - String policyAlias = EnrichPolicy.getBaseName(policyName); + final String policyName = ConfigurationUtils.readStringProperty(TYPE, tag, config, "policy_name"); + final String indexAlias = EnrichPolicy.getBaseName(policyName); if (metadata == null) { throw new IllegalStateException("enrich processor factory has not yet been initialized with cluster 
state"); } - IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(policyAlias); + IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(indexAlias); if (indexAbstraction == null) { throw new IllegalArgumentException("no enrich index exists for policy with name [" + policyName + "]"); } @@ -78,7 +80,7 @@ public Processor create(Map processorFactories, Strin if (maxMatches < 1 || maxMatches > 128) { throw ConfigurationUtils.newConfigurationException(TYPE, tag, "max_matches", "should be between 1 and 128"); } - BiConsumer>, Exception>> searchRunner = createSearchRunner(client, enrichCache); + var searchRunner = createSearchRunner(indexAlias, client, enrichCache); switch (policyType) { case EnrichPolicy.MATCH_TYPE: case EnrichPolicy.RANGE_TYPE: @@ -121,25 +123,40 @@ public Processor create(Map processorFactories, Strin @Override public void accept(ClusterState state) { metadata = state.getMetadata(); - enrichCache.setMetadata(metadata); } - private static BiConsumer>, Exception>> createSearchRunner( - Client client, - EnrichCache enrichCache - ) { + private SearchRunner createSearchRunner(String indexAlias, Client client, EnrichCache enrichCache) { Client originClient = new OriginSettingClient(client, ENRICH_ORIGIN); - return (req, handler) -> { + return (value, maxMatches, reqSupplier, handler) -> { // intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition. 
enrichCache.computeIfAbsent( - req, - (searchRequest, searchResponseActionListener) -> originClient.execute( + getEnrichIndexKey(indexAlias), + value, + maxMatches, + (searchResponseActionListener) -> originClient.execute( EnrichCoordinatorProxyAction.INSTANCE, - searchRequest, + reqSupplier.get(), searchResponseActionListener ), ActionListener.wrap(resp -> handler.accept(resp, null), e -> handler.accept(null, e)) ); }; } + + private String getEnrichIndexKey(String indexAlias) { + IndexAbstraction ia = metadata.getIndicesLookup().get(indexAlias); + if (ia == null) { + throw new IndexNotFoundException("no generated enrich index [" + indexAlias + "]"); + } + return ia.getIndices().get(0).getName(); + } + + public interface SearchRunner { + void accept( + Object value, + int maxMatches, + Supplier searchRequestSupplier, + BiConsumer>, Exception> handler + ); + } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java index dd164c630495c..998b06e870b7f 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.enrich; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.ShapeRelation; @@ -15,10 +14,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.TemplateScript; -import java.util.List; -import java.util.Map; -import java.util.function.BiConsumer; - public final class GeoMatchProcessor extends AbstractEnrichProcessor { private final ShapeRelation shapeRelation; @@ -27,7 +22,7 @@ public final class GeoMatchProcessor extends AbstractEnrichProcessor { GeoMatchProcessor( String 
tag, String description, - BiConsumer>, Exception>> searchRunner, + EnrichProcessorFactory.SearchRunner searchRunner, String policyName, TemplateScript.Factory field, TemplateScript.Factory targetField, diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java index 76156c84c22b2..b8b2f1b17fa85 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/MatchProcessor.java @@ -6,22 +6,19 @@ */ package org.elasticsearch.xpack.enrich; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.script.TemplateScript; import java.util.List; -import java.util.Map; -import java.util.function.BiConsumer; public final class MatchProcessor extends AbstractEnrichProcessor { MatchProcessor( String tag, String description, - BiConsumer>, Exception>> searchRunner, + EnrichProcessorFactory.SearchRunner searchRunner, String policyName, TemplateScript.Factory field, TemplateScript.Factory targetField, diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java index 19af929017a3b..7125dfd45eaff 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichCacheTests.java @@ -7,23 +7,14 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import 
org.elasticsearch.cluster.metadata.AliasMetadata; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; import java.io.IOException; @@ -35,7 +26,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -45,47 +35,19 @@ public class EnrichCacheTests extends ESTestCase { public void testCaching() { - // Emulate cluster metadata: - // (two enrich indices with corresponding alias entries) - var metadata = Metadata.builder() - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build()) - ) - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build()) - ) - .build(); - // Emulated search requests that an enrich processor could generate: // (two unique searches for two 
enrich policies) - var searchRequest1 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1")) - ); - var searchRequest2 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "2")) - ); - var searchRequest3 = new SearchRequest(EnrichPolicy.getBaseName("policy2")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1")) - ); - var searchRequest4 = new SearchRequest(EnrichPolicy.getBaseName("policy2")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "2")) - ); + var cacheKey1 = new EnrichCache.CacheKey("policy1-1", "1", 1); + var cacheKey2 = new EnrichCache.CacheKey("policy1-1", "2", 1); + var cacheKey3 = new EnrichCache.CacheKey("policy2-1", "1", 1); + var cacheKey4 = new EnrichCache.CacheKey("policy2-1", "2", 1); // Emulated search response (content doesn't matter, since it isn't used, it just a cache entry) EnrichCache.CacheValue searchResponse = new EnrichCache.CacheValue(List.of(Map.of("test", "entry")), 1L); EnrichCache enrichCache = new EnrichCache(3); - enrichCache.setMetadata(metadata); - enrichCache.put(searchRequest1, searchResponse); - enrichCache.put(searchRequest2, searchResponse); - enrichCache.put(searchRequest3, searchResponse); + enrichCache.put(cacheKey1, searchResponse); + enrichCache.put(cacheKey2, searchResponse); + enrichCache.put(cacheKey3, searchResponse); var cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(0L)); @@ -93,10 +55,10 @@ public void testCaching() { assertThat(cacheStats.evictions(), equalTo(0L)); assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L)); - assertThat(enrichCache.get(searchRequest1), notNullValue()); - assertThat(enrichCache.get(searchRequest2), notNullValue()); - assertThat(enrichCache.get(searchRequest3), 
notNullValue()); - assertThat(enrichCache.get(searchRequest4), nullValue()); + assertThat(enrichCache.get(cacheKey1), notNullValue()); + assertThat(enrichCache.get(cacheKey2), notNullValue()); + assertThat(enrichCache.get(cacheKey3), notNullValue()); + assertThat(enrichCache.get(cacheKey4), nullValue()); cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(3L)); @@ -104,7 +66,7 @@ public void testCaching() { assertThat(cacheStats.evictions(), equalTo(0L)); assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L)); - enrichCache.put(searchRequest4, searchResponse); + enrichCache.put(cacheKey4, searchResponse); cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(3L)); @@ -112,41 +74,27 @@ public void testCaching() { assertThat(cacheStats.evictions(), equalTo(1L)); assertThat(cacheStats.cacheSizeInBytes(), equalTo(3L)); - // Simulate enrich policy execution, which should make current cache entries unused. 
- metadata = Metadata.builder() - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-2") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build()) - ) - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-2") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build()) - ) - .build(); - enrichCache.setMetadata(metadata); + cacheKey1 = new EnrichCache.CacheKey("policy1-2", "1", 1); + cacheKey2 = new EnrichCache.CacheKey("policy1-2", "2", 1); + cacheKey3 = new EnrichCache.CacheKey("policy2-2", "1", 1); + cacheKey4 = new EnrichCache.CacheKey("policy2-2", "2", 1); // Because enrich index has changed, cache can't serve cached entries - assertThat(enrichCache.get(searchRequest1), nullValue()); - assertThat(enrichCache.get(searchRequest2), nullValue()); - assertThat(enrichCache.get(searchRequest3), nullValue()); - assertThat(enrichCache.get(searchRequest4), nullValue()); + assertThat(enrichCache.get(cacheKey1), nullValue()); + assertThat(enrichCache.get(cacheKey2), nullValue()); + assertThat(enrichCache.get(cacheKey3), nullValue()); + assertThat(enrichCache.get(cacheKey4), nullValue()); // Add new entries using new enrich index name as key - enrichCache.put(searchRequest1, searchResponse); - enrichCache.put(searchRequest2, searchResponse); - enrichCache.put(searchRequest3, searchResponse); + enrichCache.put(cacheKey1, searchResponse); + enrichCache.put(cacheKey2, searchResponse); + enrichCache.put(cacheKey3, searchResponse); // Entries can now be served: - assertThat(enrichCache.get(searchRequest1), notNullValue()); - assertThat(enrichCache.get(searchRequest2), notNullValue()); - assertThat(enrichCache.get(searchRequest3), notNullValue()); - assertThat(enrichCache.get(searchRequest4), nullValue()); + 
assertThat(enrichCache.get(cacheKey1), notNullValue()); + assertThat(enrichCache.get(cacheKey2), notNullValue()); + assertThat(enrichCache.get(cacheKey3), notNullValue()); + assertThat(enrichCache.get(cacheKey4), nullValue()); cacheStats = enrichCache.getStats("_id"); assertThat(cacheStats.count(), equalTo(3L)); assertThat(cacheStats.hits(), equalTo(6L)); @@ -156,30 +104,8 @@ public void testCaching() { } public void testComputeIfAbsent() throws InterruptedException { - // Emulate cluster metadata: - // (two enrich indices with corresponding alias entries) - var metadata = Metadata.builder() - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy1") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy1")).build()) - ) - .put( - IndexMetadata.builder(EnrichPolicy.getBaseName("policy2") + "-1") - .settings(settings(IndexVersion.current())) - .numberOfShards(1) - .numberOfReplicas(0) - .putAlias(AliasMetadata.builder(EnrichPolicy.getBaseName("policy2")).build()) - ) - .build(); - // Emulated search requests that an enrich processor could generate: // (two unique searches for two enrich policies) - var searchRequest1 = new SearchRequest(EnrichPolicy.getBaseName("policy1")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("match_field", "1")) - ); final List> searchResponseMap = List.of( Map.of("key1", "value1", "key2", "value2"), Map.of("key3", "value3", "key4", "value4") @@ -187,12 +113,11 @@ public void testComputeIfAbsent() throws InterruptedException { final AtomicLong testNanoTime = new AtomicLong(0); // We use a relative time provider that increments 1ms every time it is called. 
So each operation appears to take 1ms EnrichCache enrichCache = new EnrichCache(3, () -> testNanoTime.addAndGet(TimeValue.timeValueMillis(1).getNanos())); - enrichCache.setMetadata(metadata); { CountDownLatch queriedDatabaseLatch = new CountDownLatch(1); CountDownLatch notifiedOfResultLatch = new CountDownLatch(1); - enrichCache.computeIfAbsent(searchRequest1, (searchRequest, searchResponseActionListener) -> { + enrichCache.computeIfAbsent("policy1-1", "1", 1, (searchResponseActionListener) -> { SearchResponse searchResponse = convertToSearchResponse(searchResponseMap); searchResponseActionListener.onResponse(searchResponse); searchResponse.decRef(); @@ -222,7 +147,7 @@ public void onFailure(Exception e) { { CountDownLatch notifiedOfResultLatch = new CountDownLatch(1); - enrichCache.computeIfAbsent(searchRequest1, (searchRequest, searchResponseActionListener) -> { + enrichCache.computeIfAbsent("policy1-1", "1", 1, (searchResponseActionListener) -> { fail("Expected no call to the database because item should have been in the cache"); }, new ActionListener<>() { @Override @@ -326,22 +251,4 @@ public void testDeepCopy() { assertArrayEquals(new byte[] { 1, 2, 3 }, (byte[]) result.get("embedded_object")); } - public void testEnrichIndexNotExist() { - // Emulate cluster metadata: - var metadata = Metadata.builder().build(); - - // Emulated search request on a non-exist enrich index that an enrich processor could generate - var searchRequest = new SearchRequest(EnrichPolicy.getBaseName("policy-enrich-index-not-generated")).source( - new SearchSourceBuilder().query(new MatchQueryBuilder("test", "query")) - ); - // Emulated search response (content doesn't matter, since it isn't used, it just a cache entry) - EnrichCache.CacheValue searchResponse = new EnrichCache.CacheValue(List.of(Map.of("test", "entry")), 1L); - - EnrichCache enrichCache = new EnrichCache(1); - enrichCache.setMetadata(metadata); - - IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () 
-> enrichCache.put(searchRequest, searchResponse)); - assertThat(e.getMessage(), containsString("no generated enrich index [.enrich-policy-enrich-index-not-generated]")); - } - } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java index 5642e685a592d..fcf2bc3c14292 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java @@ -27,6 +27,7 @@ import java.util.List; import java.util.Map; import java.util.function.BiConsumer; +import java.util.function.Supplier; import static org.elasticsearch.xpack.enrich.MatchProcessorTests.str; import static org.hamcrest.Matchers.emptyArray; @@ -139,7 +140,7 @@ private void testBasicsForFieldValue(Object fieldValue, Geometry expectedGeometr } - private static final class MockSearchFunction implements BiConsumer>, Exception>> { + private static final class MockSearchFunction implements EnrichProcessorFactory.SearchRunner { private final List> mockResponse; private final SetOnce capturedRequest; private final Exception exception; @@ -157,8 +158,13 @@ private static final class MockSearchFunction implements BiConsumer>, Exception> handler) { - capturedRequest.set(request); + public void accept( + Object value, + int maxMatches, + Supplier searchRequestSupplier, + BiConsumer>, Exception> handler + ) { + capturedRequest.set(searchRequestSupplier.get()); if (exception != null) { handler.accept(null, exception); } else { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index 0d7f900188ba1..b4d3ec15d31d3 100644 --- 
a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.function.BiConsumer; +import java.util.function.Supplier; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; @@ -376,7 +377,7 @@ public void testArray() { assertThat(entry.get("tld"), equalTo("co")); } - private static final class MockSearchFunction implements BiConsumer>, Exception>> { + private static final class MockSearchFunction implements EnrichProcessorFactory.SearchRunner { private final List> mockResponse; private final SetOnce capturedRequest; private final Exception exception; @@ -394,8 +395,13 @@ private static final class MockSearchFunction implements BiConsumer>, Exception> handler) { - capturedRequest.set(request); + public void accept( + Object value, + int maxMatches, + Supplier searchRequestSupplier, + BiConsumer>, Exception> handler + ) { + capturedRequest.set(searchRequestSupplier.get()); if (exception != null) { handler.accept(null, exception); } else { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 8dbc9b0f4f43a..0ae84b62bafdc 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -22,6 +22,7 @@ import java.util.Collection; import java.util.List; +import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; @@ -65,11 +66,9 @@ public void testExecute() throws 
Exception { for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L)); - assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); - assertThat( - response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), - equalTo("value1") - ); + Map sourceAsMap = response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap(); + assertThat(sourceAsMap.size(), equalTo(1)); + assertThat(sourceAsMap.get("key1"), equalTo("value1")); } } ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 9b264a2cc41cf..d80a4af3dbac6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -33,7 +33,6 @@ import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; @@ -433,13 +432,12 @@ public void onFailure(Exception e) { private static QueryRulesetResult mapSearchResponseToQueryRulesetList(SearchResponse response) { final List rulesetResults = Arrays.stream(response.getHits().getHits()) - .map(QueryRulesIndexService::hitToQueryRulesetListItem) + .map(searchHit -> QueryRulesIndexService.hitToQueryRulesetListItem(searchHit.getSourceAsMap())) .toList(); return new QueryRulesetResult(rulesetResults, (int) 
response.getHits().getTotalHits().value()); } - private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { - final Map sourceMap = searchHit.getSourceAsMap(); + private static QueryRulesetListItem hitToQueryRulesetListItem(final Map sourceMap) { final String rulesetId = (String) sourceMap.get(QueryRuleset.ID_FIELD.getPreferredName()); @SuppressWarnings("unchecked") final List> rules = ((List>) sourceMap.get(QueryRuleset.RULES_FIELD.getPreferredName())); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java index 528204f4132ea..f402278197207 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/QueryRuleRetrieverBuilder.java @@ -164,13 +164,17 @@ protected QueryRuleRetrieverBuilder clone(List newChildRetrieve } @Override - protected RankDoc[] combineInnerRetrieverResults(List rankResults) { + protected RankDoc[] combineInnerRetrieverResults(List rankResults, boolean explain) { assert rankResults.size() == 1; ScoreDoc[] scoreDocs = rankResults.getFirst(); RankDoc[] rankDocs = new RuleQueryRankDoc[scoreDocs.length]; for (int i = 0; i < scoreDocs.length; i++) { ScoreDoc scoreDoc = scoreDocs[i]; - rankDocs[i] = new RuleQueryRankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex, rulesetIds, matchCriteria); + if (explain) { + rankDocs[i] = new RuleQueryRankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex, rulesetIds, matchCriteria); + } else { + rankDocs[i] = new RuleQueryRankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex); + } rankDocs[i].rank = i + 1; } return rankDocs; diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java index 9c329f20f0cb2..59f3d9aed31ce 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -27,6 +28,10 @@ public class RuleQueryRankDoc extends RankDoc { public final List rulesetIds; public final Map matchCriteria; + public RuleQueryRankDoc(int doc, float score, int shardIndex) { + this(doc, score, shardIndex, null, null); + } + public RuleQueryRankDoc(int doc, float score, int shardIndex, List rulesetIds, Map matchCriteria) { super(doc, score, shardIndex); this.rulesetIds = rulesetIds; @@ -35,13 +40,20 @@ public RuleQueryRankDoc(int doc, float score, int shardIndex, List rules public RuleQueryRankDoc(StreamInput in) throws IOException { super(in); - rulesetIds = in.readStringCollectionAsImmutableList(); - matchCriteria = in.readGenericMap(); + if (in.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN)) { + List inRulesetIds = in.readOptionalStringCollectionAsList(); + this.rulesetIds = inRulesetIds == null ? null : Collections.unmodifiableList(inRulesetIds); + boolean matchCriteriaExists = in.readBoolean(); + this.matchCriteria = matchCriteriaExists ? 
in.readGenericMap() : null; + } else { + rulesetIds = in.readStringCollectionAsImmutableList(); + matchCriteria = in.readGenericMap(); + } } @Override public Explanation explain(Explanation[] sources, String[] queryNames) { - + assert rulesetIds != null && matchCriteria != null; return Explanation.match( score, "query rules evaluated rules from rulesets " + rulesetIds + " and match criteria " + matchCriteria, @@ -51,8 +63,16 @@ public Explanation explain(Explanation[] sources, String[] queryNames) { @Override public void doWriteTo(StreamOutput out) throws IOException { - out.writeStringCollection(rulesetIds); - out.writeGenericMap(matchCriteria); + if (out.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN)) { + out.writeOptionalStringCollection(rulesetIds); + out.writeBoolean(matchCriteria != null); + if (matchCriteria != null) { + out.writeGenericMap(matchCriteria); + } + } else { + out.writeStringCollection(rulesetIds == null ? Collections.emptyList() : rulesetIds); + out.writeGenericMap(matchCriteria == null ? 
Collections.emptyMap() : matchCriteria); + } } @Override @@ -89,10 +109,14 @@ public String getWriteableName() { @Override protected void doToXContent(XContentBuilder builder, Params params) throws IOException { - builder.array("rulesetIds", rulesetIds.toArray()); - builder.startObject("matchCriteria"); - builder.mapContents(matchCriteria); - builder.endObject(); + if (rulesetIds != null) { + builder.array("rulesetIds", rulesetIds.toArray()); + } + if (matchCriteria != null) { + builder.startObject("matchCriteria"); + builder.mapContents(matchCriteria); + builder.endObject(); + } } @Override diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 8d2050fb43044..2498b621b73e4 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -348,4 +348,31 @@ tasks.named('stringTemplates').configure { it.inputFile = inInputFile it.outputFile = "org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InBytesRefEvaluator.java" } + + File coalesceInputFile = file("src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st") + template { + it.properties = booleanProperties + it.inputFile = coalesceInputFile + it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java" + } + template { + it.properties = intProperties + it.inputFile = coalesceInputFile + it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java" + } + template { + it.properties = longProperties + it.inputFile = coalesceInputFile + it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java" + } + template { + it.properties = doubleProperties + it.inputFile = coalesceInputFile + it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = 
coalesceInputFile + it.outputFile = "org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java" + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 5d2d6c97a11f1..b08b80acc6976 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -223,6 +223,14 @@ sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permi */ Builder copyFrom(BooleanBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. 
+ */ + Builder copyFrom(BooleanBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 32627a0e0d36b..7f4705ddecb27 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BitArray; @@ -85,7 +86,11 @@ public BooleanBlockBuilder copyFrom(Block block, int beginInclusive, int endExcl /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom(BooleanBlock, int)}. + *

*/ + @Override public BooleanBlockBuilder copyFrom(BooleanBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ public BooleanBlockBuilder copyFrom(BooleanBlock block, int beginInclusive, int private void copyFromBlock(BooleanBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBoolean(block.getBoolean(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ private void copyFromVector(BooleanVector vector, int beginInclusive, int endExc } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public BooleanBlockBuilder copyFrom(BooleanBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendBoolean(block.getBoolean(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendBoolean(block.getBoolean(i++)); + } + endPositionEntry(); + return this; + } + @Override public BooleanBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 6fe45f33a7df6..6661895722725 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -228,6 +228,16 @@ sealed interface Builder extends Block.Builder, BlockLoader.BytesRefBuilder perm */ Builder copyFrom(BytesRefBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. 
+ */ + Builder copyFrom(BytesRefBlock block, int position, BytesRef scratch); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 6232cbdd2717c..0a2b350780405 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -88,7 +88,11 @@ public BytesRefBlockBuilder copyFrom(Block block, int beginInclusive, int endExc /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom(BytesRefBlock, int, BytesRef scratch)}. + *

*/ + @Override public BytesRefBlockBuilder copyFrom(BytesRefBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -105,21 +109,7 @@ public BytesRefBlockBuilder copyFrom(BytesRefBlock block, int beginInclusive, in private void copyFromBlock(BytesRefBlock block, int beginInclusive, int endExclusive) { BytesRef scratch = new BytesRef(); for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendBytesRef(block.getBytesRef(i++, scratch)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p, scratch); } } @@ -130,6 +120,39 @@ private void copyFromVector(BytesRefVector vector, int beginInclusive, int endEx } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public BytesRefBlockBuilder copyFrom(BytesRefBlock block, int position, BytesRef scratch) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendBytesRef(block.getBytesRef(i++, scratch)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendBytesRef(block.getBytesRef(i++, scratch)); + } + endPositionEntry(); + return this; + } + @Override public BytesRefBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 395ccd412fabb..04df6253662a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -217,6 +217,14 @@ sealed interface Builder extends Block.Builder, BlockLoader.DoubleBuilder permit */ Builder copyFrom(DoubleBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. 
+ */ + Builder copyFrom(DoubleBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 5921c2daa9f92..8ecc9b91e0ffe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.DoubleArray; @@ -85,7 +86,11 @@ public DoubleBlockBuilder copyFrom(Block block, int beginInclusive, int endExclu /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom(DoubleBlock, int)}. + *

*/ + @Override public DoubleBlockBuilder copyFrom(DoubleBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ public DoubleBlockBuilder copyFrom(DoubleBlock block, int beginInclusive, int en private void copyFromBlock(DoubleBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendDouble(block.getDouble(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ private void copyFromVector(DoubleVector vector, int beginInclusive, int endExcl } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public DoubleBlockBuilder copyFrom(DoubleBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendDouble(block.getDouble(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendDouble(block.getDouble(i++)); + } + endPositionEntry(); + return this; + } + @Override public DoubleBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java index 633c9f309901a..0679e38b63219 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlock.java @@ -216,6 +216,14 @@ sealed interface Builder extends Block.Builder, BlockLoader.FloatBuilder permits */ Builder copyFrom(FloatBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. 
+ */ + Builder copyFrom(FloatBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java index 9c1e7aba49a21..8504912adc057 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.FloatArray; @@ -85,7 +86,11 @@ public FloatBlockBuilder copyFrom(Block block, int beginInclusive, int endExclus /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom(FloatBlock, int)}. + *

*/ + @Override public FloatBlockBuilder copyFrom(FloatBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ public FloatBlockBuilder copyFrom(FloatBlock block, int beginInclusive, int endE private void copyFromBlock(FloatBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendFloat(block.getFloat(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ private void copyFromVector(FloatVector vector, int beginInclusive, int endExclu } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public FloatBlockBuilder copyFrom(FloatBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendFloat(block.getFloat(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendFloat(block.getFloat(i++)); + } + endPositionEntry(); + return this; + } + @Override public FloatBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 7c77d9965391e..6af61695929df 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -216,6 +216,14 @@ sealed interface Builder extends Block.Builder, BlockLoader.IntBuilder permits I */ Builder copyFrom(IntBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. 
+ */ + Builder copyFrom(IntBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 85f943004de29..31449b6f1cd72 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.IntArray; @@ -85,7 +86,11 @@ public IntBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusiv /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom(IntBlock, int)}. + *

*/ + @Override public IntBlockBuilder copyFrom(IntBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ public IntBlockBuilder copyFrom(IntBlock block, int beginInclusive, int endExclu private void copyFromBlock(IntBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendInt(block.getInt(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ private void copyFromVector(IntVector vector, int beginInclusive, int endExclusi } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public IntBlockBuilder copyFrom(IntBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendInt(block.getInt(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendInt(block.getInt(i++)); + } + endPositionEntry(); + return this; + } + @Override public IntBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 6c88da8860ca7..090efd9a31579 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -217,6 +217,14 @@ sealed interface Builder extends Block.Builder, BlockLoader.LongBuilder permits */ Builder copyFrom(LongBlock block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. 
+ */ + Builder copyFrom(LongBlock block, int position); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index d24ae214da63a..bf25347edd989 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.LongArray; @@ -85,7 +86,11 @@ public LongBlockBuilder copyFrom(Block block, int beginInclusive, int endExclusi /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom(LongBlock, int)}. + *

*/ + @Override public LongBlockBuilder copyFrom(LongBlock block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -101,21 +106,7 @@ public LongBlockBuilder copyFrom(LongBlock block, int beginInclusive, int endExc private void copyFromBlock(LongBlock block, int beginInclusive, int endExclusive) { for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - appendLong(block.getLong(i++)); - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p); } } @@ -125,6 +116,37 @@ private void copyFromVector(LongVector vector, int beginInclusive, int endExclus } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public LongBlockBuilder copyFrom(LongBlock block, int position) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + appendLong(block.getLong(i++)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + appendLong(block.getLong(i++)); + } + endPositionEntry(); + return this; + } + @Override public LongBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index edf54a829deba..de87c08f7ceb1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -280,6 +280,11 @@ interface Builder extends BlockLoader.Builder, Releasable { /** * Copy the values in {@code block} from {@code beginInclusive} to * {@code endExclusive} into this builder. + *

+ * For single-position copies use the faster + * {@link IntBlockBuilder#copyFrom(IntBlock, int)}, + * {@link LongBlockBuilder#copyFrom(LongBlock, int)}, etc. + *

*/ Builder copyFrom(Block block, int beginInclusive, int endExclusive); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 67e4ac4bb334f..6c1616c370721 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -288,6 +288,18 @@ $endif$ */ Builder copyFrom($Type$Block block, int beginInclusive, int endExclusive); + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. +$if(BytesRef)$ + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. +$endif$ + */ + Builder copyFrom($Type$Block block, int position$if(BytesRef)$, BytesRef scratch$endif$); + @Override Builder appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 8397a0f5274f1..d60e1de179d20 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.core.Releasables; $else$ +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.$Array$; @@ -123,7 +124,11 @@ $endif$ /** * Copy the values in {@code block} from {@code beginInclusive} to * 
{@code endExclusive} into this builder. + *

+ * For single-position copies see {@link #copyFrom($Type$Block, int$if(BytesRef)$, BytesRef$endif$)}. + *

*/ + @Override public $Type$BlockBuilder copyFrom($Type$Block block, int beginInclusive, int endExclusive) { if (endExclusive > block.getPositionCount()) { throw new IllegalArgumentException("can't copy past the end [" + endExclusive + " > " + block.getPositionCount() + "]"); @@ -142,25 +147,7 @@ $if(BytesRef)$ BytesRef scratch = new BytesRef(); $endif$ for (int p = beginInclusive; p < endExclusive; p++) { - if (block.isNull(p)) { - appendNull(); - continue; - } - int count = block.getValueCount(p); - if (count > 1) { - beginPositionEntry(); - } - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { -$if(BytesRef)$ - appendBytesRef(block.getBytesRef(i++, scratch)); -$else$ - append$Type$(block.get$Type$(i++)); -$endif$ - } - if (count > 1) { - endPositionEntry(); - } + copyFrom(block, p$if(BytesRef)$, scratch$endif$); } } @@ -177,6 +164,41 @@ $endif$ } } + /** + * Copy the values in {@code block} at {@code position}. If this position + * has a single value, this'll copy a single value. If this positions has + * many values, it'll copy all of them. If this is {@code null}, then it'll + * copy the {@code null}. +$if(BytesRef)$ + * @param scratch Scratch string used to prevent allocation. Share this + between many calls to this function. +$endif$ + *

+ * Note that there isn't a version of this method on {@link Block.Builder} that takes + * {@link Block}. That'd be quite slow, running position by position. And it's important + * to know if you are copying {@link BytesRef}s so you can have the scratch. + *

+ */ + @Override + public $Type$BlockBuilder copyFrom($Type$Block block, int position$if(BytesRef)$, BytesRef scratch$endif$) { + if (block.isNull(position)) { + appendNull(); + return this; + } + int count = block.getValueCount(position); + int i = block.getFirstValueIndex(position); + if (count == 1) { + append$Type$(block.get$Type$(i++$if(BytesRef)$, scratch$endif$)); + return this; + } + beginPositionEntry(); + for (int v = 0; v < count; v++) { + append$Type$(block.get$Type$(i++$if(BytesRef)$, scratch$endif$)); + } + endPositionEntry(); + return this; + } + @Override public $Type$BlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { this.mvOrdering = mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 349ce7b00ff10..2573baf78b16a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -96,12 +96,18 @@ public Block eval(Page page) { public void close() { } + + @Override + public String toString() { + return CONSTANT_NULL_NAME; + } }; } @Override public String toString() { - return "ConstantNull"; + return CONSTANT_NULL_NAME; } }; + private static final String CONSTANT_NULL_NAME = "ConstantNull"; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 62cc4daf5fde5..d1a5d1757bc90 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -76,6 +76,7 @@ public final class ExchangeService extends 
AbstractLifecycleComponent { private final BlockFactory blockFactory; private final Map sinks = ConcurrentCollections.newConcurrentMap(); + private final Map exchangeSources = ConcurrentCollections.newConcurrentMap(); public ExchangeService(Settings settings, ThreadPool threadPool, String executorName, BlockFactory blockFactory) { this.threadPool = threadPool; @@ -172,6 +173,32 @@ public static void openExchange( ); } + /** + * Remember the exchange source handler for the given session ID. + * This can be used for async/stop requests. + */ + public void addExchangeSourceHandler(String sessionId, ExchangeSourceHandler sourceHandler) { + exchangeSources.put(sessionId, sourceHandler); + } + + public ExchangeSourceHandler removeExchangeSourceHandler(String sessionId) { + return exchangeSources.remove(sessionId); + } + + /** + * Finishes the session early, i.e., before all sources are finished. + * It is called by async/stop API and should be called on the node that coordinates the async request. + * It will close all sources and return the results - unlike cancel, this does not discard the results. 
+ */ + public void finishSessionEarly(String sessionId, ActionListener listener) { + ExchangeSourceHandler exchangeSource = removeExchangeSourceHandler(sessionId); + if (exchangeSource != null) { + exchangeSource.finishEarly(false, listener); + } else { + listener.onResponse(null); + } + } + private static class OpenExchangeRequest extends TransportRequest { private final String sessionId; private final int exchangeBuffer; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java index 679e3441fb45f..1d3c8df914bc6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; @@ -92,7 +93,16 @@ private void assertEvens(Block block) { Block.Builder builder = elementType.newBlockBuilder(block.getPositionCount() / 2, blockFactory); List> expected = new ArrayList<>(); for (int i = 0; i < block.getPositionCount(); i += 2) { - builder.copyFrom(block, i, i + 1); + switch (elementType) { + case BOOLEAN -> ((BooleanBlockBuilder) builder).copyFrom((BooleanBlock) block, i); + case BYTES_REF -> ((BytesRefBlockBuilder) builder).copyFrom((BytesRefBlock) block, i, new BytesRef()); + case DOUBLE -> ((DoubleBlockBuilder) builder).copyFrom((DoubleBlock) block, i); + case FLOAT -> ((FloatBlockBuilder) builder).copyFrom((FloatBlock) block, i); + case INT -> ((IntBlockBuilder) builder).copyFrom((IntBlock) block, i); + case LONG -> 
((LongBlockBuilder) builder).copyFrom((LongBlock) block, i); + default -> throw new IllegalArgumentException("unsupported type: " + elementType); + } + expected.add(valuesAtPositions(block, i, i + 1).get(0)); } assertBlockValues(builder.build(), expected); diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java index b45ef45914985..0a6f73ee648df 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.Locale; +import java.util.Map; import static org.elasticsearch.core.TimeValue.timeValueNanos; import static org.hamcrest.Matchers.allOf; @@ -50,8 +51,9 @@ protected Response runESQLCommand(String user, String command) throws IOExceptio } @Override - protected MapMatcher responseMatcher() { - return super.responseMatcher().entry("is_running", equalTo(false)).entry("id", allOf(notNullValue(), instanceOf(String.class))); + protected MapMatcher responseMatcher(Map result) { + return super.responseMatcher(result).entry("is_running", equalTo(false)) + .entry("id", allOf(notNullValue(), instanceOf(String.class))); } @Override diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index d8e3b0cccf394..7d96c400cb659 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -165,8 +165,8 @@ public void indexDocuments() throws IOException { } } - 
protected MapMatcher responseMatcher() { - return matchesMap(); + protected MapMatcher responseMatcher(Map result) { + return getResultMatcher(result); } public void testAllowedIndices() throws Exception { @@ -182,10 +182,7 @@ public void testAllowedIndices() throws Exception { Response resp = runESQLCommand(user, "from index-user1 | stats sum=sum(value)"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher mapMatcher = responseMatcher(); - if (responseMap.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) responseMap.get("took")).intValue()); - } + MapMatcher mapMatcher = responseMatcher(responseMap); MapMatcher matcher = mapMatcher.entry("columns", List.of(Map.of("name", "sum", "type", "double"))) .entry("values", List.of(List.of(43.0d))); assertMap(responseMap, matcher); @@ -195,10 +192,7 @@ public void testAllowedIndices() throws Exception { Response resp = runESQLCommand(user, "from index-user2 | stats sum=sum(value)"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher mapMatcher = responseMatcher(); - if (responseMap.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) responseMap.get("took")).intValue()); - } + MapMatcher mapMatcher = responseMatcher(responseMap); MapMatcher matcher = mapMatcher.entry("columns", List.of(Map.of("name", "sum", "type", "double"))) .entry("values", List.of(List.of(72.0d))); assertMap(responseMap, matcher); @@ -208,10 +202,7 @@ public void testAllowedIndices() throws Exception { Response resp = runESQLCommand("metadata1_read2", "from " + index + " | stats sum=sum(value)"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher mapMatcher = responseMatcher(); - if (responseMap.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) responseMap.get("took")).intValue()); - } + MapMatcher mapMatcher = responseMatcher(responseMap); MapMatcher matcher = mapMatcher.entry("columns", List.of(Map.of("name", "sum", "type", "double"))) 
.entry("values", List.of(List.of(72.0d))); assertMap(responseMap, matcher); @@ -226,9 +217,10 @@ public void testAliases() throws Exception { ); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher matcher = responseMatcher().entry("took", ((Integer) responseMap.get("took")).intValue()) - .entry("columns", List.of(Map.of("name", "sum", "type", "double"), Map.of("name", "index", "type", "keyword"))) - .entry("values", List.of(List.of(72.0d, "index-user2"))); + MapMatcher matcher = responseMatcher(responseMap).entry( + "columns", + List.of(Map.of("name", "sum", "type", "double"), Map.of("name", "index", "type", "keyword")) + ).entry("values", List.of(List.of(72.0d, "index-user2"))); assertMap(responseMap, matcher); } } @@ -238,16 +230,14 @@ public void testAliasFilter() throws Exception { Response resp = runESQLCommand("alias_user1", "from " + index + " METADATA _index" + "| KEEP _index, org, value | LIMIT 10"); assertOK(resp); Map responseMap = entityAsMap(resp); - MapMatcher matcher = responseMatcher().entry("took", ((Integer) responseMap.get("took")).intValue()) - .entry( - "columns", - List.of( - Map.of("name", "_index", "type", "keyword"), - Map.of("name", "org", "type", "keyword"), - Map.of("name", "value", "type", "double") - ) + MapMatcher matcher = responseMatcher(responseMap).entry( + "columns", + List.of( + Map.of("name", "_index", "type", "keyword"), + Map.of("name", "org", "type", "keyword"), + Map.of("name", "value", "type", "double") ) - .entry("values", List.of(List.of("index-user1", "sales", 31.0d))); + ).entry("values", List.of(List.of("index-user1", "sales", 31.0d))); assertMap(responseMap, matcher); } } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index 6e43d40a3005a..b838d8ae284a4 100644 --- 
a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -37,12 +37,8 @@ import java.util.stream.Stream; import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.xpack.esql.ccq.Clusters.REMOTE_CLUSTER_NAME; -import static org.hamcrest.Matchers.any; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.*; @ThreadLeakFilters(filters = TestClustersThreadFilter.class) public class MultiClustersIT extends ESRestTestCase { @@ -159,6 +155,17 @@ private Map runEsql(RestEsqlTestCase.RequestObjectBuilder reques } } + private void assertResultMap(boolean includeCCSMetadata, Map result, C columns, V values, boolean remoteOnly) { + MapMatcher mapMatcher = getResultMatcher(ccsMetadataAvailable(), result.containsKey("is_partial")); + if (includeCCSMetadata) { + mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); + } + assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + if (includeCCSMetadata) { + assertClusterDetailsMap(result, remoteOnly); + } + } + public void testCount() throws Exception { { boolean includeCCSMetadata = includeCCSMetadata(); @@ -166,17 +173,7 @@ public void testCount() throws Exception { var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(localDocs.size() + remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", 
columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, false); - } + assertResultMap(includeCCSMetadata, result, columns, values, false); } { boolean includeCCSMetadata = includeCCSMetadata(); @@ -184,17 +181,7 @@ public void testCount() throws Exception { var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, true); - } + assertResultMap(includeCCSMetadata, result, columns, values, true); } } @@ -207,17 +194,7 @@ public void testUngroupedAggs() throws Exception { var values = List.of(List.of(Math.toIntExact(sum))); // check all sections of map except _cluster/details - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, false); - } + assertResultMap(includeCCSMetadata, result, columns, values, false); } { boolean includeCCSMetadata = includeCCSMetadata(); @@ -226,17 +203,7 @@ public void testUngroupedAggs() throws Exception { long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); var values = List.of(List.of(Math.toIntExact(sum))); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - 
assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, true); - } + assertResultMap(includeCCSMetadata, result, columns, values, true); } { assumeTrue("requires ccs metadata", ccsMetadataAvailable()); @@ -245,15 +212,7 @@ public void testUngroupedAggs() throws Exception { long sum = remoteDocs.stream().mapToLong(d -> d.data).sum(); var values = List.of(List.of(Math.toIntExact(sum))); - MapMatcher mapMatcher = matchesMap(); - assertMap( - result, - mapMatcher.entry("columns", columns) - .entry("values", values) - .entry("took", greaterThanOrEqualTo(0)) - .entry("_clusters", any(Map.class)) - ); - assertClusterDetailsMap(result, true); + assertResultMap(true, result, columns, values, true); } } @@ -325,17 +284,7 @@ public void testGroupedAggs() throws Exception { .map(e -> List.of(Math.toIntExact(e.getValue()), e.getKey())) .toList(); - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, false); - } + assertResultMap(includeCCSMetadata, result, columns, values, false); } { boolean includeCCSMetadata = includeCCSMetadata(); @@ -353,17 +302,7 @@ public void testGroupedAggs() throws Exception { .toList(); // check all sections of map except _clusters/details - MapMatcher mapMatcher = matchesMap(); - if (includeCCSMetadata) { - mapMatcher = mapMatcher.entry("_clusters", any(Map.class)); - } - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); - if (includeCCSMetadata) { - assertClusterDetailsMap(result, true); - } + 
assertResultMap(includeCCSMetadata, result, columns, values, true); } } @@ -378,11 +317,8 @@ public void testIndexPattern() throws Exception { Map result = run("FROM " + indexPattern + " | STATS c = COUNT(*)", false); var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(localDocs.size() + remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + + assertResultMap(false, result, columns, values, false); } { String indexPattern = randomFrom("*:test-remote-index", "*:test-remote-*", "*:test-*"); @@ -390,11 +326,7 @@ public void testIndexPattern() throws Exception { var columns = List.of(Map.of("name", "c", "type", "long")); var values = List.of(List.of(remoteDocs.size())); - MapMatcher mapMatcher = matchesMap(); - if (ccsMetadataAvailable()) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - assertMap(result, mapMatcher.entry("columns", columns).entry("values", values)); + assertResultMap(false, result, columns, values, false); } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index cae9e1ba8eb66..601ce819224b5 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static 
org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -80,12 +79,10 @@ public void testBasicEsql() throws IOException { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } Map result = runEsql(builder); - assertEquals(3, result.size()); + Map colA = Map.of("name", "avg(value)", "type", "double"); - assertEquals(List.of(colA), result.get("columns")); - assertEquals(List.of(List.of(499.5d)), result.get("values")); + assertResultMap(result, List.of(colA), List.of(List.of(499.5d))); assertTrue(result.containsKey("took")); - assertThat(((Number) result.get("took")).longValue(), greaterThanOrEqualTo(0L)); } public void testInvalidPragma() throws IOException { @@ -118,11 +115,8 @@ public void testDoNotLogWithInfo() throws IOException { setLoggingLevel("INFO"); RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_NOT_LOG_ME = 1"); Map result = runEsql(builder); - assertEquals(3, result.size()); - assertThat(((Integer) result.get("took")).intValue(), greaterThanOrEqualTo(0)); Map colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer"); - assertEquals(List.of(colA), result.get("columns")); - assertEquals(List.of(List.of(1)), result.get("values")); + assertResultMap(result, List.of(colA), List.of(List.of(1))); for (int i = 0; i < cluster.getNumNodes(); i++) { try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { Streams.readAllLines(log, line -> assertThat(line, not(containsString("DO_NOT_LOG_ME")))); @@ -138,11 +132,8 @@ public void testDoLogWithDebug() throws IOException { setLoggingLevel("DEBUG"); RequestObjectBuilder builder = requestObjectBuilder().query("ROW DO_LOG_ME = 1"); Map result = runEsql(builder); - assertEquals(3, result.size()); - assertThat(((Integer) result.get("took")).intValue(), greaterThanOrEqualTo(0)); Map colA = Map.of("name", "DO_LOG_ME", "type", "integer"); - assertEquals(List.of(colA), result.get("columns")); - 
assertEquals(List.of(List.of(1)), result.get("values")); + assertResultMap(result, List.of(colA), List.of(List.of(1))); boolean[] found = new boolean[] { false }; for (int i = 0; i < cluster.getNumNodes(); i++) { try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { @@ -289,13 +280,11 @@ public void testProfile() throws IOException { builder.pragmas(Settings.builder().put("data_partitioning", "shard").build()); } Map result = runEsql(builder); - MapMatcher mapMatcher = matchesMap(); - assertMap( + assertResultMap( result, - mapMatcher.entry("columns", matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double"))) - .entry("values", List.of(List.of(499.5d))) - .entry("profile", matchesMap().entry("drivers", instanceOf(List.class))) - .entry("took", greaterThanOrEqualTo(0)) + getResultMatcher(result).entry("profile", matchesMap().entry("drivers", instanceOf(List.class))), + matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double")), + equalTo(List.of(List.of(499.5d))) ); List> signatures = new ArrayList<>(); @@ -373,24 +362,19 @@ public void testInlineStatsProfile() throws IOException { } Map result = runEsql(builder); - MapMatcher mapMatcher = matchesMap(); ListMatcher values = matchesList(); for (int i = 0; i < 1000; i++) { values = values.item(matchesList().item("2020-12-12T00:00:00.000Z").item("value" + i).item("value" + i).item(i).item(499.5)); } - assertMap( + assertResultMap( result, - mapMatcher.entry( - "columns", - matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) - .item(matchesMap().entry("name", "test").entry("type", "text")) - .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword")) - .item(matchesMap().entry("name", "value").entry("type", "long")) - .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")) - ) - .entry("values", values) - .entry("profile", matchesMap().entry("drivers", instanceOf(List.class))) - .entry("took", 
greaterThanOrEqualTo(0)) + getResultMatcher(result).entry("profile", matchesMap().entry("drivers", instanceOf(List.class))), + matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date")) + .item(matchesMap().entry("name", "test").entry("type", "text")) + .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword")) + .item(matchesMap().entry("name", "value").entry("type", "long")) + .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")), + values ); List> signatures = new ArrayList<>(); @@ -484,20 +468,15 @@ public void testForceSleepsProfile() throws IOException { for (int group2 = 0; group2 < 10; group2++) { expectedValues.add(List.of(1.0, 1, 1, 0, group2)); } - MapMatcher mapMatcher = matchesMap(); - assertMap( + assertResultMap( result, - mapMatcher.entry( - "columns", - matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double")) - .item(matchesMap().entry("name", "MAX(value)").entry("type", "long")) - .item(matchesMap().entry("name", "MIN(value)").entry("type", "long")) - .item(matchesMap().entry("name", "group1").entry("type", "long")) - .item(matchesMap().entry("name", "group2").entry("type", "long")) - ) - .entry("values", expectedValues) - .entry("profile", matchesMap().entry("drivers", instanceOf(List.class))) - .entry("took", greaterThanOrEqualTo(0)) + getResultMatcher(result).entry("profile", matchesMap().entry("drivers", instanceOf(List.class))), + matchesList().item(matchesMap().entry("name", "AVG(value)").entry("type", "double")) + .item(matchesMap().entry("name", "MAX(value)").entry("type", "long")) + .item(matchesMap().entry("name", "MIN(value)").entry("type", "long")) + .item(matchesMap().entry("name", "group1").entry("type", "long")) + .item(matchesMap().entry("name", "group2").entry("type", "long")), + equalTo(expectedValues) ); @SuppressWarnings("unchecked") diff --git 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index 813354db697e1..5e1755adbe637 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -17,13 +17,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.BlockLoader; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ListMatcher; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -51,7 +50,6 @@ import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.runEsqlSync; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** * Creates indices with many different mappings and fetches values from them to make sure @@ -304,11 +302,7 @@ public void testFlattenedUnsupported() throws IOException { {"flattened": {"a": "foo"}}"""); Map result = runEsql("FROM test* | LIMIT 2"); - assertMap( - result, - matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("flattened", "unsupported"))) - .entry("values", List.of(matchesList().item(null))) - ); + assertResultMap(result, List.of(columnInfo("flattened", "unsupported")), List.of(matchesList().item(null))); } 
public void testEmptyMapping() throws IOException { @@ -322,7 +316,7 @@ public void testEmptyMapping() throws IOException { // TODO this is broken in main too // Map result = runEsqlSync(new RestEsqlTestCase.RequestObjectBuilder().query("FROM test* | LIMIT 2")); - // assertMap( + // assertResultMap( // result, // matchesMap().entry("columns", List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))) // .entry("values", List.of(matchesList().item(null).item(null))) @@ -345,13 +339,10 @@ public void testEmptyMapping() throws IOException { public void testTextFieldWithKeywordSubfield() throws IOException { String value = randomAlphaOfLength(20); Map result = new Test("text").storeAndDocValues(randomBoolean(), null).sub("raw", keywordTest()).roundTrip(value); - - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.raw", "keyword")) - ).entry("values", List.of(matchesList().item(value).item(value))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.raw", "keyword")), + List.of(matchesList().item(value).item(value)) ); } @@ -372,12 +363,10 @@ public void testTextFieldWithIntegerSubfield() throws IOException { int value = randomInt(); Map result = textTest().sub("int", intTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")) - ).entry("values", List.of(matchesList().item(Integer.toString(value)).item(value))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")), + List.of(matchesList().item(Integer.toString(value)).item(value)) ); } @@ -398,12 +387,10 @@ public void testTextFieldWithIntegerSubfieldMalformed() throws IOException { String value = randomAlphaOfLength(5); Map result = 
textTest().sourceMode(SourceMode.DEFAULT).sub("int", intTest().ignoreMalformed(true)).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")) - ).entry("values", List.of(matchesList().item(value).item(null))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.int", "integer")), + List.of(matchesList().item(value).item(null)) ); } @@ -424,12 +411,10 @@ public void testTextFieldWithIpSubfield() throws IOException { String value = NetworkAddress.format(randomIp(randomBoolean())); Map result = textTest().sub("ip", ipTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")) - ).entry("values", List.of(matchesList().item(value).item(value))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")), + List.of(matchesList().item(value).item(value)) ); } @@ -450,12 +435,10 @@ public void testTextFieldWithIpSubfieldMalformed() throws IOException { String value = randomAlphaOfLength(10); Map result = textTest().sourceMode(SourceMode.DEFAULT).sub("ip", ipTest().ignoreMalformed(true)).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")) - ).entry("values", List.of(matchesList().item(value).item(null))) + List.of(columnInfo("text_field", "text"), columnInfo("text_field.ip", "ip")), + List.of(matchesList().item(value).item(null)) ); } @@ -477,12 +460,10 @@ public void testIntFieldWithTextOrKeywordSubfield() throws IOException { boolean text = randomBoolean(); Map result = intTest().sub("str", text ? 
textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(value).item(Integer.toString(value)))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")), + List.of(matchesList().item(value).item(Integer.toString(value))) ); } @@ -504,12 +485,10 @@ public void testIntFieldWithTextOrKeywordSubfieldMalformed() throws IOException boolean text = randomBoolean(); Map result = intTest().forceIgnoreMalformed().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(null).item(value))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.str", text ? "text" : "keyword")), + List.of(matchesList().item(null).item(value)) ); } @@ -531,12 +510,10 @@ public void testIpFieldWithTextOrKeywordSubfield() throws IOException { boolean text = randomBoolean(); Map result = ipTest().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(value).item(value))) + List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? 
"text" : "keyword")), + List.of(matchesList().item(value).item(value)) ); } @@ -558,12 +535,10 @@ public void testIpFieldWithTextOrKeywordSubfieldMalformed() throws IOException { boolean text = randomBoolean(); Map result = ipTest().forceIgnoreMalformed().sub("str", text ? textTest() : keywordTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? "text" : "keyword")) - ).entry("values", List.of(matchesList().item(null).item(value))) + List.of(columnInfo("ip_field", "ip"), columnInfo("ip_field.str", text ? "text" : "keyword")), + List.of(matchesList().item(null).item(value)) ); } @@ -585,12 +560,10 @@ public void testIntFieldWithByteSubfield() throws IOException { byte value = randomByte(); Map result = intTest().sub("byte", byteTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")) - ).entry("values", List.of(matchesList().item((int) value).item((int) value))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")), + List.of(matchesList().item((int) value).item((int) value)) ); } @@ -614,12 +587,10 @@ public void testIntFieldWithByteSubfieldTooBig() throws IOException { .sub("byte", byteTest().ignoreMalformed(true)) .roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")) - ).entry("values", List.of(matchesList().item(value).item(null))) + List.of(columnInfo("integer_field", "integer"), columnInfo("integer_field.byte", "integer")), + List.of(matchesList().item(value).item(null)) ); } @@ -641,23 +612,13 @@ public void 
testByteFieldWithIntSubfield() throws IOException { byte value = randomByte(); Map result = byteTest().sub("int", intTest()).roundTrip(value); - assertMap( + assertResultMap( result, - matchesMapWithOptionalTook(result.get("took")).entry( - "columns", - List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer")) - ).entry("values", List.of(matchesList().item((int) value).item((int) value))) + List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer")), + List.of(matchesList().item((int) value).item((int) value)) ); } - static MapMatcher matchesMapWithOptionalTook(Object tookTimeValue) { - MapMatcher mapMatcher = matchesMap(); - if (tookTimeValue instanceof Number) { - mapMatcher = mapMatcher.entry("took", greaterThanOrEqualTo(0)); - } - return mapMatcher; - } - /** *
      * "byte_field": {
@@ -676,12 +637,10 @@ public void testByteFieldWithIntSubfieldTooBig() throws IOException {
         int value = randomValueOtherThanMany((Integer v) -> (Byte.MIN_VALUE <= v) && (v <= Byte.MAX_VALUE), ESTestCase::randomInt);
         Map result = byteTest().forceIgnoreMalformed().sub("int", intTest()).roundTrip(value);
 
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer"))
-            ).entry("values", List.of(matchesList().item(null).item(value)))
+            List.of(columnInfo("byte_field", "integer"), columnInfo("byte_field.int", "integer")),
+            List.of(matchesList().item(null).item(value))
         );
     }
 
@@ -708,11 +667,7 @@ public void testIncompatibleTypes() throws IOException {
             {"f": 1}""");
 
         Map result = runEsql("FROM test*");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("f", "unsupported")))
-                .entry("values", List.of(matchesList().item(null), matchesList().item(null)))
-        );
+        assertResultMap(result, List.of(columnInfo("f", "unsupported")), List.of(matchesList().item(null), matchesList().item(null)));
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT f | LIMIT 3"));
         String err = EntityUtils.toString(e.getResponse().getEntity());
         assertThat(
@@ -746,12 +701,10 @@ public void testDistinctInEachIndex() throws IOException {
             {"other": "o2"}""");
 
         Map result = runEsql("FROM test* | SORT file, other");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("file", "keyword"), columnInfo("other", "keyword"))
-            ).entry("values", List.of(matchesList().item("f1").item(null), matchesList().item(null).item("o2")))
+            List.of(columnInfo("file", "keyword"), columnInfo("other", "keyword")),
+            List.of(matchesList().item("f1").item(null), matchesList().item(null).item("o2"))
         );
     }
 
@@ -812,12 +765,10 @@ public void testMergeKeywordAndObject() throws IOException {
         );
 
         Map result = runEsql("FROM test* | SORT file.raw | LIMIT 2");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("file", "unsupported"), columnInfo("file.raw", "keyword"))
-            ).entry("values", List.of(matchesList().item(null).item("o2"), matchesList().item(null).item(null)))
+            List.of(columnInfo("file", "unsupported"), columnInfo("file.raw", "keyword")),
+            List.of(matchesList().item(null).item("o2"), matchesList().item(null).item(null))
         );
     }
 
@@ -859,12 +810,10 @@ public void testPropagateUnsupportedToSubFields() throws IOException {
         assertThat(err, containsString("Cannot use field [f.raw] with unsupported type [ip_range]"));
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))
-            ).entry("values", List.of(matchesList().item(null).item(null)))
+            List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported")),
+            List.of(matchesList().item(null).item(null))
         );
     }
 
@@ -924,12 +873,10 @@ public void testMergeUnsupportedAndObject() throws IOException {
         assertThat(err, containsString("Cannot use field [f.raw] with unsupported type [ip_range]"));
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported"))
-            ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item(null).item(null)))
+            List.of(columnInfo("f", "unsupported"), columnInfo("f.raw", "unsupported")),
+            List.of(matchesList().item(null).item(null), matchesList().item(null).item(null))
         );
     }
 
@@ -961,11 +908,7 @@ public void testIntegerDocValuesConflict() throws IOException {
             {"emp_no": 2}""");
 
         Map result = runEsql("FROM test* | SORT emp_no | LIMIT 2");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("emp_no", "integer")))
-                .entry("values", List.of(matchesList().item(1), matchesList().item(2)))
-        );
+        assertResultMap(result, List.of(columnInfo("emp_no", "integer")), List.of(matchesList().item(1), matchesList().item(2)));
     }
 
     /**
@@ -1007,11 +950,7 @@ public void testLongIntegerConflict() throws IOException {
         );
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("emp_no", "unsupported")))
-                .entry("values", List.of(matchesList().item(null), matchesList().item(null)))
-        );
+        assertResultMap(result, List.of(columnInfo("emp_no", "unsupported")), List.of(matchesList().item(null), matchesList().item(null)));
     }
 
     /**
@@ -1053,11 +992,7 @@ public void testIntegerShortConflict() throws IOException {
         );
 
         Map result = runEsql("FROM test* | LIMIT 2");
-        assertMap(
-            result,
-            matchesMapWithOptionalTook(result.get("took")).entry("columns", List.of(columnInfo("emp_no", "unsupported")))
-                .entry("values", List.of(matchesList().item(null), matchesList().item(null)))
-        );
+        assertResultMap(result, List.of(columnInfo("emp_no", "unsupported")), List.of(matchesList().item(null), matchesList().item(null)));
     }
 
     /**
@@ -1095,7 +1030,7 @@ public void testTypeConflictInObject() throws IOException {
             {"foo": {"emp_no": "cat"}}""");
 
         Map result = runEsql("FROM test* | LIMIT 3");
-        assertMap(result, matchesMap().entry("columns", List.of(columnInfo("foo.emp_no", "unsupported"))).extraOk());
+        assertMap(result, getResultMatcher(result).entry("columns", List.of(columnInfo("foo.emp_no", "unsupported"))).extraOk());
 
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT foo.emp_no | LIMIT 3"));
         String err = EntityUtils.toString(e.getResponse().getEntity());
@@ -1147,35 +1082,29 @@ public void testOneNestedSubField_AndSameNameSupportedField() throws IOException
             """);
 
         Map result = runEsql("FROM test");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", Collections.EMPTY_LIST)
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            Collections.EMPTY_LIST
         );
 
         index("test", """
             {"Responses.process.pid": 123,"process.parent.command_line":"run.bat"}""");
 
         result = runEsql("FROM test");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item("run.bat").item("run.bat"))
         );
 
         result = runEsql("""
             FROM test | where process.parent.command_line == "run.bat"
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item("run.bat").item("run.bat"))
         );
 
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test | SORT Responses.process.pid"));
@@ -1235,23 +1164,19 @@ public void testOneNestedSubField_AndSameNameSupportedField_TwoIndices() throws
             {"process.parent.command_line":"run.bat"}""");
 
         Map result = runEsql("FROM test* | SORT process.parent.command_line ASC NULLS FIRST");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat"))
         );
 
         result = runEsql("""
             FROM test* | where process.parent.command_line == "run.bat"
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text"))
-            ).entry("values", List.of(matchesList().item("run.bat").item("run.bat")))
+            List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")),
+            List.of(matchesList().item("run.bat").item("run.bat"))
         );
 
         ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT Responses.process.pid"));
@@ -1339,61 +1264,47 @@ public void testOneNestedField_AndSameNameSupportedField_TwoIndices() throws IOE
             {"Responses.process": 222,"process.parent.command_line":"run2.bat"}""");
 
         Map result = runEsql("FROM test* | SORT process.parent.command_line");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of(
+                matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
+                matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
             )
-                .entry(
-                    "values",
-                    List.of(
-                        matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
-                        matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
-                    )
-                )
         );
 
         result = runEsql("""
             FROM test* | where Responses.process.pid == 111
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
-            ).entry("values", List.of())
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of()
         );
 
         result = runEsql("FROM test* | SORT process.parent.command_line");
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of(
+                matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
+                matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
             )
-                .entry(
-                    "values",
-                    List.of(
-                        matchesList().item(null).item(null).item("run1.bat").item("run1.bat"),
-                        matchesList().item(222).item(222).item("run2.bat").item("run2.bat")
-                    )
-                )
         );
 
         result = runEsql("""
@@ -1401,17 +1312,15 @@ public void testOneNestedField_AndSameNameSupportedField_TwoIndices() throws IOE
             | SORT process.parent.command_line
             | WHERE Responses.process IS NULL
             """);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMapWithOptionalTook(result.get("took")).entry(
-                "columns",
-                List.of(
-                    columnInfo("Responses.process", "integer"),
-                    columnInfo("Responses.process.pid", "long"),
-                    columnInfo("process.parent.command_line", "keyword"),
-                    columnInfo("process.parent.command_line.text", "text")
-                )
-            ).entry("values", List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat")))
+            List.of(
+                columnInfo("Responses.process", "integer"),
+                columnInfo("Responses.process.pid", "long"),
+                columnInfo("process.parent.command_line", "keyword"),
+                columnInfo("process.parent.command_line.text", "text")
+            ),
+            List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat"))
         );
     }
 
@@ -1447,7 +1356,7 @@ private CheckedConsumer<XContentBuilder, IOException> empNoInObject(String empNo
     private enum SourceMode {
         DEFAULT {
             @Override
-            void sourceMapping(XContentBuilder builder) {}
+            void sourceMapping(Settings.Builder builder) {}
 
             @Override
             boolean stored() {
@@ -1456,8 +1365,8 @@ boolean stored() {
         },
         STORED {
             @Override
-            void sourceMapping(XContentBuilder builder) throws IOException {
-                builder.startObject(SourceFieldMapper.NAME).field("mode", "stored").endObject();
+            void sourceMapping(Settings.Builder builder) throws IOException {
+                builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "stored");
             }
 
             @Override
@@ -1480,8 +1389,8 @@ boolean stored() {
          */
         SYNTHETIC {
             @Override
-            void sourceMapping(XContentBuilder builder) throws IOException {
-                builder.startObject(SourceFieldMapper.NAME).field("mode", "synthetic").endObject();
+            void sourceMapping(Settings.Builder builder) throws IOException {
+                builder.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), "synthetic");
             }
 
             @Override
@@ -1490,7 +1399,7 @@ boolean stored() {
             }
         };
 
-        abstract void sourceMapping(XContentBuilder builder) throws IOException;
+        abstract void sourceMapping(Settings.Builder builder) throws IOException;
 
         abstract boolean stored();
     }
@@ -1671,7 +1580,7 @@ void test(Object value, Object expectedValue) throws IOException {
                 values = values.item(expectedValue);
             }
 
-            assertMap(result, matchesMapWithOptionalTook(result.get("took")).entry("columns", columns).entry("values", List.of(values)));
+            assertResultMap(result, columns, List.of(values));
         }
 
         void createIndex(String name, String fieldName) throws IOException {
@@ -1680,8 +1589,10 @@ void createIndex(String name, String fieldName) throws IOException {
             }
             logger.info("source_mode: {}", sourceMode);
 
+            Settings.Builder settings = Settings.builder();
+            sourceMode.sourceMapping(settings);
+
             FieldExtractorTestCase.createIndex(name, index -> {
-                sourceMode.sourceMapping(index);
                 index.startObject("properties");
                 {
                     index.startObject(fieldName);
@@ -1783,6 +1694,16 @@ private static void createIndex(String name, CheckedConsumer<XContentBuilder, IOException> mapping)
+    private static void createIndex(String name, Settings.Builder setting, CheckedConsumer<XContentBuilder, IOException> mapping)
+        throws IOException {
+        XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject();
+        mapping.accept(index);
+        index.endObject();
+        String configStr = Strings.toString(index);
+        logger.info("index: {} {}", name, configStr);
+        ESRestTestCase.createIndex(name, setting, configStr);
+    }
+
     /**
      * Yaml adds newlines and some indentation which we don't want to match.
      */
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java
index 5e0aeb5b3535d..ba057cbe276ba 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RequestIndexFilteringTestCase.java
@@ -26,14 +26,12 @@
 import java.util.Map;
 
 import static org.elasticsearch.test.ListMatcher.matchesList;
-import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.entityToMap;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.requestObjectBuilder;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.nullValue;
@@ -63,42 +61,35 @@ public void testTimestampFilterFromQuery() throws IOException {
 
         // filter includes both indices in the result (all columns, all rows)
         RestEsqlTestCase.RequestObjectBuilder builder = timestampFilter("gte", "2023-01-01").query(from("test*"));
-        Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "id2").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1 + docsTest2))).entry("took", greaterThanOrEqualTo(0))
+        assertResultMap(
+            runEsql(builder),
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "id2").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(docsTest1 + docsTest2))
         );
 
         // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)!
         builder = timestampFilter("gte", "2024-01-01").query(from("test*"));
-        assertMap(
+        assertResultMap(
             runEsql(builder),
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
 
         // filter excludes both indices (no rows); the first analysis step fails because there are no columns, a second attempt succeeds
         // after eliminating the index filter. All columns are returned.
         builder = timestampFilter("gte", "2025-01-01").query(from("test*"));
-        assertMap(
+        assertResultMap(
             runEsql(builder),
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "id2").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(0))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "id2").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(0))
         );
     }
 
@@ -110,27 +101,22 @@ public void testFieldExistsFilter_KeepWildcard() throws IOException {
 
         // filter includes only test1. Columns and rows of test2 are filtered out
         RestEsqlTestCase.RequestObjectBuilder builder = existsFilter("id1").query(from("test*"));
-        Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+        assertResultMap(
+            runEsql(builder),
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "value").entry("type", "long")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
 
         // filter includes only test1. Columns from test2 are filtered out, as well (not only rows)!
         builder = existsFilter("id1").query(from("test*") + " METADATA _index | KEEP _index, id*");
-        result = runEsql(builder);
-        assertMap(
+        Map<String, Object> result = runEsql(builder);
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
         @SuppressWarnings("unchecked")
         var values = (List<List<Object>>) result.get("values");
@@ -151,14 +137,12 @@ public void testFieldExistsFilter_With_ExplicitUseOfDiscardedIndexFields() throw
             from("test*") + " METADATA _index | SORT id2 | KEEP _index, id*"
         );
         Map<String, Object> result = runEsql(builder);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "id1").entry("type", "integer"))
-                    .item(matchesMap().entry("name", "id2").entry("type", "integer"))
-            ).entry("values", allOf(instanceOf(List.class), hasSize(docsTest1))).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "_index").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "id1").entry("type", "integer"))
+                .item(matchesMap().entry("name", "id2").entry("type", "integer")),
+            allOf(instanceOf(List.class), hasSize(docsTest1))
         );
         @SuppressWarnings("unchecked")
         var values = (List<List<Object>>) result.get("values");
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
index bf4a4400e13cf..69bbf7420c72e 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java
@@ -23,10 +23,7 @@
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.test.MapMatcher.assertMap;
-import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 
 public abstract class RestEnrichTestCase extends ESRestTestCase {
 
@@ -194,14 +191,14 @@ public void testMatchField_ImplicitFieldsList() throws IOException {
         Map<String, Object> result = runEsql("from test1 | enrich countries | keep number | sort number");
         var columns = List.of(Map.of("name", "number", "type", "long"));
         var values = List.of(List.of(1000), List.of(1000), List.of(5000));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     public void testMatchField_ImplicitFieldsList_WithStats() throws IOException {
         Map<String, Object> result = runEsql("from test1 | enrich countries | stats s = sum(number) by country_name");
         var columns = List.of(Map.of("name", "s", "type", "long"), Map.of("name", "country_name", "type", "keyword"));
         var values = List.of(List.of(2000, "United States of America"), List.of(5000, "China"));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     public void testSimpleIndexFilteringWithEnrich() throws IOException {
@@ -226,7 +223,7 @@ public void testSimpleIndexFilteringWithEnrich() throws IOException {
             Arrays.asList(null, 1000, "US", "test1"),
             Arrays.asList(3, null, "US", "test2")
         );
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
 
         // filter something that won't affect the columns
         result = runEsql("""
@@ -235,7 +232,7 @@ public void testSimpleIndexFilteringWithEnrich() throws IOException {
                 | keep *number, geo.dest, _index
                 | sort geo.dest, _index
             """, b -> b.startObject("exists").field("field", "foobar").endObject());
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", List.of()).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, List.of());
     }
 
     public void testIndexFilteringWithEnrich_RemoveOneIndex() throws IOException {
@@ -259,7 +256,7 @@ public void testIndexFilteringWithEnrich_RemoveOneIndex() throws IOException {
             Arrays.asList(null, 1000, "US", "test1")
         );
 
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
 
         // filter out test2 and use a wildcarded field name in the "keep" command
         result = runEsql("""
@@ -275,7 +272,7 @@ public void testIndexFilteringWithEnrich_RemoveOneIndex() throws IOException {
             Map.of("name", "_index", "type", "keyword")
         );
         values = List.of(Arrays.asList(5000, "CN", "test1"), Arrays.asList(1000, "US", "test1"), Arrays.asList(1000, "US", "test1"));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     public void testIndexFilteringWithEnrich_ExpectException() throws IOException {
@@ -315,7 +312,7 @@ public void testIndexFilteringWithEnrich_FilterUnusedIndexFields() throws IOExce
             Map.of("name", "_index", "type", "keyword")
         );
         var values = List.of(Arrays.asList(2, "IN", "test2"), Arrays.asList(2, "IN", "test2"), Arrays.asList(3, "US", "test2"));
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
     }
 
     private Map<String, Object> runEsql(String query) throws IOException {
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
index 86f8a8c5363f6..66333421eeb75 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java
@@ -59,7 +59,6 @@
 import static java.util.Map.entry;
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 import static org.elasticsearch.test.ListMatcher.matchesList;
-import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.elasticsearch.test.MapMatcher.matchesMap;
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.as;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.Mode.ASYNC;
@@ -257,7 +256,7 @@ public static RequestObjectBuilder jsonBuilder() throws IOException {
 
     public void testGetAnswer() throws IOException {
         Map<String, Object> answer = runEsql(requestObjectBuilder().query("row a = 1, b = 2"));
-        assertEquals(3, answer.size());
+        assertEquals(4, answer.size());
         assertThat(((Integer) answer.get("took")).intValue(), greaterThanOrEqualTo(0));
         Map<String, String> colA = Map.of("name", "a", "type", "integer");
         Map<String, String> colB = Map.of("name", "b", "type", "integer");
@@ -296,21 +295,13 @@ public void testNullInAggs() throws IOException {
         assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo("{\"errors\":false}"));
 
         RequestObjectBuilder builder = requestObjectBuilder().query(fromIndex() + " | stats min(value)");
-        Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry("values", List.of(List.of(1)))
-                .entry("columns", List.of(Map.of("name", "min(value)", "type", "long")))
-                .entry("took", greaterThanOrEqualTo(0))
-        );
+        assertResultMap(runEsql(builder), List.of(Map.of("name", "min(value)", "type", "long")), List.of(List.of(1)));
 
         builder = requestObjectBuilder().query(fromIndex() + " | stats min(value) by group | sort group, `min(value)`");
-        result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry("values", List.of(List.of(2, 0), List.of(1, 1)))
-                .entry("columns", List.of(Map.of("name", "min(value)", "type", "long"), Map.of("name", "group", "type", "long")))
-                .entry("took", greaterThanOrEqualTo(0))
+        assertResultMap(
+            runEsql(builder),
+            List.of(Map.of("name", "min(value)", "type", "long"), Map.of("name", "group", "type", "long")),
+            List.of(List.of(2, 0), List.of(1, 1))
         );
     }
 
@@ -569,7 +560,7 @@ public void testMetadataFieldsOnMultipleIndices() throws IOException {
         );
         var values = List.of(List.of(3, testIndexName() + "-2", 1, "id-2"), List.of(2, testIndexName() + "-1", 2, "id-1"));
 
-        assertMap(result, matchesMap().entry("columns", columns).entry("values", values).entry("took", greaterThanOrEqualTo(0)));
+        assertResultMap(result, columns, values);
 
         assertThat(deleteIndex(testIndexName() + "-1").isAcknowledged(), is(true)); // clean up
         assertThat(deleteIndex(testIndexName() + "-2").isAcknowledged(), is(true)); // clean up
@@ -867,17 +858,15 @@ public void testInlineStatsNow() throws IOException {
                     .item(499.5)
             );
         }
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
-                    .item(matchesMap().entry("name", "test").entry("type", "text"))
-                    .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "value").entry("type", "long"))
-                    .item(matchesMap().entry("name", "now").entry("type", "date"))
-                    .item(matchesMap().entry("name", "AVG(value)").entry("type", "double"))
-            ).entry("values", values).entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "@timestamp").entry("type", "date"))
+                .item(matchesMap().entry("name", "test").entry("type", "text"))
+                .item(matchesMap().entry("name", "test.keyword").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "value").entry("type", "long"))
+                .item(matchesMap().entry("name", "now").entry("type", "date"))
+                .item(matchesMap().entry("name", "AVG(value)").entry("type", "double")),
+            values
         );
     }
 
@@ -893,11 +882,10 @@ public void testTopLevelFilter() throws IOException {
         }).query(fromIndex() + " | STATS SUM(value)");
 
         Map<String, Object> result = runEsql(builder);
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry("columns", matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")))
-                .entry("values", List.of(List.of(499500)))
-                .entry("took", greaterThanOrEqualTo(0))
+            matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")),
+            List.of(List.of(499500))
         );
     }
 
@@ -912,12 +900,7 @@ public void testTopLevelFilterMerged() throws IOException {
             b.endObject();
         }).query(fromIndex() + " | WHERE value == 12 | STATS SUM(value)");
         Map<String, Object> result = runEsql(builder);
-        assertMap(
-            result,
-            matchesMap().entry("columns", matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")))
-                .entry("values", List.of(List.of(12)))
-                .entry("took", greaterThanOrEqualTo(0))
-        );
+        assertResultMap(result, matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")), List.of(List.of(12)));
     }
 
     public void testTopLevelFilterBoolMerged() throws IOException {
@@ -946,11 +929,10 @@ public void testTopLevelFilterBoolMerged() throws IOException {
                 b.endObject();
             }).query(fromIndex() + " | WHERE @timestamp > \"2010-01-01\" | STATS SUM(value)");
             Map<String, Object> result = runEsql(builder);
-            assertMap(
+            assertResultMap(
                 result,
-                matchesMap().entry("columns", matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")))
-                    .entry("values", List.of(List.of(12)))
-                    .entry("took", greaterThanOrEqualTo(0))
+                matchesList().item(matchesMap().entry("name", "SUM(value)").entry("type", "long")),
+                List.of(List.of(12))
             );
         }
     }
@@ -1132,13 +1114,12 @@ public void testAsyncGetWithoutContentType() throws IOException {
         for (int i = 0; i < count; i++) {
             values = values.item(matchesList().item("keyword" + i).item(i));
         }
-        assertMap(
+        assertResultMap(
             result,
-            matchesMap().entry(
-                "columns",
-                matchesList().item(matchesMap().entry("name", "keyword").entry("type", "keyword"))
-                    .item(matchesMap().entry("name", "integer").entry("type", "integer"))
-            ).entry("values", values).entry("took", greaterThanOrEqualTo(0)).entry("id", id).entry("is_running", false)
+            getResultMatcher(result).entry("id", id).entry("is_running", false),
+            matchesList().item(matchesMap().entry("name", "keyword").entry("type", "keyword"))
+                .item(matchesMap().entry("name", "integer").entry("type", "integer")),
+            values
         );
 
     }
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec
index b2a063e509a85..1f4e555bd5d83 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec
@@ -1249,3 +1249,28 @@ sv_nanos:date_nanos               | a:keyword             | b:keyword  | c:keywo
 2023-03-23T12:15:03.360103847Z | 2023-03-23T12:15:03.360Z | 2023-03-23 | 2023-03-23T12:15:03.360103847Z
 2023-03-23T12:15:03.360103847Z | 2023-03-23T12:15:03.360Z | 2023-03-23 | 2023-03-23T12:15:03.360103847Z
 ;
+
+Date Nanos Date Diff
+required_capability: date_nanos_date_diff
+required_capability: to_date_nanos
+
+FROM date_nanos
+| EVAL n = MV_MAX(nanos)
+| EVAL diff_sec = DATE_DIFF("seconds", TO_DATE_NANOS("2023-10-23T12:15:03.360103847Z"), n)
+| EVAL diff_sec_m = DATE_DIFF("seconds", TO_DATETIME("2023-10-23T12:15:03.360103847Z"), n)
+| KEEP diff_sec, diff_sec_m, n;
+ignoreOrder:true
+
+# Note - the diff computed against the millisecond-precision datetime (diff_sec_m) is expected to differ slightly due to rounding.
+diff_sec:integer | diff_sec_m:integer | n:date_nanos
+5998             | 5998             | 2023-10-23T13:55:01.543123456Z
+5932             | 5932             | 2023-10-23T13:53:55.832987654Z
+5871             | 5871             | 2023-10-23T13:52:55.015787878Z
+5811             | 5811             | 2023-10-23T13:51:54.732102837Z
+4711             | 4711             | 2023-10-23T13:33:34.937193000Z
+745              | 745              | 2023-10-23T12:27:28.948000000Z
+0                | 0                | 2023-10-23T12:15:03.360103847Z
+0                | 0                | 2023-10-23T12:15:03.360103847Z
+-18489600        | -18489599        | 2023-03-23T12:15:03.360103847Z
+-18489600        | -18489599        | 2023-03-23T12:15:03.360103847Z
+;
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
index ffbddd52b2551..7df40da0344a9 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractCrossClustersUsageTelemetryIT.java
@@ -127,7 +127,7 @@ protected CCSTelemetrySnapshot getTelemetryFromFailedQuery(String query) throws
         return getTelemetrySnapshot(queryNode);
     }
 
-    private CCSTelemetrySnapshot getTelemetrySnapshot(String nodeName) {
+    protected CCSTelemetrySnapshot getTelemetrySnapshot(String nodeName) {
         var usage = cluster(LOCAL_CLUSTER).getInstance(UsageService.class, nodeName);
         return usage.getEsqlUsageHolder().getCCSTelemetrySnapshot();
     }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java
index 5554f7e571dfb..492947304d898 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPauseFieldPlugin.java
@@ -34,12 +34,16 @@ protected void onStartExecute() {}
     // Called when the engine needs to wait for further execution to be allowed.
     protected abstract boolean onWait() throws InterruptedException;
 
+    protected String scriptTypeName() {
+        return "pause";
+    }
+
     @Override
     public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
         return new ScriptEngine() {
             @Override
             public String getType() {
-                return "pause";
+                return scriptTypeName();
             }
 
             @Override
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
new file mode 100644
index 0000000000000..99a81c60a9ad2
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncEnrichStopIT.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
+import org.elasticsearch.xpack.esql.plan.logical.Enrich;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster;
+import static org.hamcrest.Matchers.equalTo;
+
+// Tests that enrich (and aggregations) still work correctly after an async query is stopped early
+public class CrossClusterAsyncEnrichStopIT extends AbstractEnrichBasedCrossClusterTestCase {
+
+    @Override
+    protected boolean reuseClusters() {
+        return false;
+    }
+
+    @Override
+    protected boolean tolerateErrorsWhenWipingEnrichPolicies() {
+        // attempt to wipe will fail since some clusters are already closed
+        return true;
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
+        List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
+        plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
+        plugins.add(SimplePauseFieldPlugin.class);
+        return plugins;
+    }
+
+    @Before
+    public void resetPlugin() {
+        SimplePauseFieldPlugin.resetPlugin();
+    }
+
+    /**
+     * This tests that enrich and aggs work after stop. It works like this:
+     * 1. We launch the async request
+     * 2. c2 index has the pause field which will pause the query until we allow it to proceed
+     * 3. We wait until c1 is done and then stop the async request
+     * 4. We allow the query to proceed
+     * 5. The result should contain the data from local and c1 and coordinator-side enrichments should happen
+     */
+    public void testEnrichAfterStop() throws Exception {
+        setupEventsIndexWithPause("c2");
+        String query = String.format(Locale.ROOT, """
+            FROM *:events,events
+            | eval ip= TO_STR(host)
+            | %s
+            | %s
+            | eval const = coalesce(const, 1)
+            | stats c = sum(const) by vendor
+            | sort vendor
+            """, enrichHosts(Enrich.Mode.COORDINATOR), enrichVendors(Enrich.Mode.COORDINATOR));
+
+        // Start the async query
+        final String asyncExecutionId = startAsyncQuery(client(), query, randomBoolean());
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until c1 is done
+        waitForCluster(client(), "c1", asyncExecutionId);
+        waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId);
+
+        // Run the stop request
+        var stopRequest = new AsyncStopRequest(asyncExecutionId);
+        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // Allow the processing to proceed
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+
+        try (EsqlQueryResponse resp = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            // Compare this to CrossClustersEnrichIT.testEnrichTwiceThenAggs - the results from c2 will be absent
+            // because we stopped it before processing the data
+            assertThat(
+                getValuesList(resp),
+                equalTo(
+                    List.of(
+                        List.of(5L, "Apple"),
+                        List.of(6L, "Microsoft"),
+                        List.of(3L, "Redhat"),
+                        List.of(2L, "Samsung"),
+                        Arrays.asList(2L, (String) null)
+                    )
+                )
+            );
+            EsqlExecutionInfo executionInfo = resp.getExecutionInfo();
+            assertThat(executionInfo.clusterAliases(), equalTo(Set.of("", "c1", "c2")));
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
+        }
+    }
+
+    private void setupEventsIndexWithPause(String clusterAlias) throws IOException {
+        record Event(long timestamp, String user, String host) {}
+        List<Event> events = List.of(
+            new Event(1, "park", "192.168.1.25"),
+            new Event(2, "akio", "192.168.1.5"),
+            new Event(3, "park", "192.168.1.2"),
+            new Event(4, "kevin", "192.168.1.3")
+        );
+        // Regular mapping
+        var stdMapping = PutMappingRequest.simpleMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip");
+        Map<String, Object> mappingMap = XContentHelper.convertToMap(BytesReference.bytes(stdMapping), false, stdMapping.contentType())
+            .v2();
+        // Pause field mapping
+        var mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("const");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", "pause").endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        mapping.endObject();
+        Map<String, Object> mappingMap2 = XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2();
+        // Merge the two mappings
+        mappingMap.putAll(mappingMap2);
+
+        var client = client(clusterAlias);
+        assertAcked(client.admin().indices().prepareDelete("events"));
+        assertAcked(client.admin().indices().prepareCreate("events").setMapping(mappingMap));
+        for (var e : events) {
+            client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host, "const", "1").get();
+        }
+        client.admin().indices().prepareRefresh("events").get();
+    }
+}
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
index 79ac8816a0039..8a163d7336b0b 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java
@@ -7,43 +7,53 @@
 
 package org.elasticsearch.xpack.esql.action;
 
-import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.compute.operator.DriverTaskRunner;
 import org.elasticsearch.compute.operator.exchange.ExchangeService;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.tasks.TaskInfo;
 import org.elasticsearch.test.AbstractMultiClustersTestCase;
 import org.elasticsearch.test.XContentTestUtils;
 import org.elasticsearch.transport.RemoteClusterAware;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.json.JsonXContent;
-import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
-import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
-import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction;
+import org.elasticsearch.xpack.core.async.AsyncExecutionId;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
 import org.junit.Before;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
-import static org.elasticsearch.core.TimeValue.timeValueMillis;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase.randomIncludeCCSMetadata;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.getAsyncResponse;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.runAsyncQuery;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQuery;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.startAsyncQueryWithPragmas;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.waitForCluster;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.is;
@@ -57,6 +67,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase {
     private static String LOCAL_INDEX = "logs-1";
     private static String REMOTE_INDEX = "logs-2";
     private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
+    private static final String INDEX_WITH_FAIL_MAPPING = "failing";
 
     @Override
     protected List<String> remoteClusterAlias() {
@@ -65,7 +76,7 @@ protected List<String> remoteClusterAlias() {
 
     @Override
     protected Map<String, Boolean> skipUnavailableForRemoteClusters() {
-        return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean());
+        return Map.of(REMOTE_CLUSTER_1, false, REMOTE_CLUSTER_2, randomBoolean());
     }
 
     @Override
@@ -75,6 +86,8 @@ protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
         plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
         plugins.add(InternalExchangePlugin.class);
         plugins.add(SimplePauseFieldPlugin.class);
+        plugins.add(FailingPauseFieldPlugin.class);
+        plugins.add(CountingPauseFieldPlugin.class);
         return plugins;
     }
 
@@ -94,6 +107,8 @@ public List<Setting<?>> getSettings() {
     @Before
     public void resetPlugin() {
         SimplePauseFieldPlugin.resetPlugin();
+        FailingPauseFieldPlugin.resetPlugin();
+        CountingPauseFieldPlugin.resetPlugin();
     }
 
     /**
@@ -103,42 +118,28 @@ public void testSuccessfulPathways() throws Exception {
         Map<String, Object> testClusterInfo = setupClusters(3);
         int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
         int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
-        int remote2NumShards = (Integer) testClusterInfo.get("remote2.blocking_index.num_shards");
+        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause", INDEX_WITH_RUNTIME_MAPPING);
 
         Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
-        Boolean requestIncludeMeta = includeCCSMetadata.v1();
         boolean responseExpectMeta = includeCCSMetadata.v2();
 
-        AtomicReference<String> asyncExecutionId = new AtomicReference<>();
-
-        String q = "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(const) | LIMIT 10";
-        try (EsqlQueryResponse resp = runAsyncQuery(q, requestIncludeMeta, null, TimeValue.timeValueMillis(100))) {
-            assertTrue(resp.isRunning());
-            assertNotNull("async execution id is null", resp.asyncExecutionId());
-            asyncExecutionId.set(resp.asyncExecutionId().get());
-            // executionInfo may or may not be set on the initial response when there is a relatively low wait_for_completion_timeout
-            // so we do not check for it here
-        }
-
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(const) | LIMIT 10",
+            includeCCSMetadata.v1()
+        );
         // wait until we know that the query against 'remote-b:blocking' has started
         SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
 
         // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it)
-        assertBusy(() -> {
-            try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
-                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
-                assertNotNull(executionInfo);
-                EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster("cluster-a");
-                assertThat(clusterA.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
-            }
-        });
+        waitForCluster(client(), "cluster-a", asyncExecutionId);
 
         /* at this point:
          *  the query against cluster-a should be finished
          *  the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown)
          *  the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b
          */
-        try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+        try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
             EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
             assertThat(asyncResponse.isRunning(), is(true));
             assertThat(
@@ -149,13 +150,8 @@ public void testSuccessfulPathways() throws Exception {
             assertThat(executionInfo.getClusterStateCount(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL), equalTo(1));
 
             EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1);
-            assertThat(clusterA.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(clusterA.getTotalShards(), greaterThanOrEqualTo(1));
-            assertThat(clusterA.getSuccessfulShards(), equalTo(clusterA.getTotalShards()));
-            assertThat(clusterA.getSkippedShards(), equalTo(0));
-            assertThat(clusterA.getFailedShards(), equalTo(0));
-            assertThat(clusterA.getFailures().size(), equalTo(0));
-            assertThat(clusterA.getTook().millis(), greaterThanOrEqualTo(0L));
+            // Should be done and successful
+            assertClusterInfoSuccess(clusterA, clusterA.getTotalShards());
 
             EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
             // should still be RUNNING since the local cluster has to do a STATS on the coordinator, waiting on remoteB
@@ -175,7 +171,7 @@ public void testSuccessfulPathways() throws Exception {
 
         // wait until both remoteB and local queries have finished
         assertBusy(() -> {
-            try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
                 EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
                 assertNotNull(executionInfo);
                 EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
@@ -186,40 +182,30 @@ public void testSuccessfulPathways() throws Exception {
             }
         });
 
-        try (EsqlQueryResponse asyncResponse = getAsyncResponse(asyncExecutionId.get())) {
+        try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
             EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
             assertNotNull(executionInfo);
             assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(1L));
+            assertThat(executionInfo.isPartial(), equalTo(false));
 
             EsqlExecutionInfo.Cluster clusterA = executionInfo.getCluster(REMOTE_CLUSTER_1);
-            assertThat(clusterA.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(clusterA.getTook().millis(), greaterThanOrEqualTo(0L));
-            assertThat(clusterA.getTotalShards(), equalTo(remote1NumShards));
-            assertThat(clusterA.getSuccessfulShards(), equalTo(remote1NumShards));
-            assertThat(clusterA.getSkippedShards(), equalTo(0));
-            assertThat(clusterA.getFailedShards(), equalTo(0));
-            assertThat(clusterA.getFailures().size(), equalTo(0));
+            assertClusterInfoSuccess(clusterA, remote1NumShards);
 
             EsqlExecutionInfo.Cluster remoteB = executionInfo.getCluster(REMOTE_CLUSTER_2);
-            assertThat(remoteB.getTook().millis(), greaterThanOrEqualTo(0L));
-            assertThat(remoteB.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(remoteB.getTotalShards(), equalTo(remote2NumShards));
-            assertThat(remoteB.getSuccessfulShards(), equalTo(remote2NumShards));
-            assertThat(remoteB.getSkippedShards(), equalTo(0));
-            assertThat(remoteB.getFailedShards(), equalTo(0));
-            assertThat(remoteB.getFailures().size(), equalTo(0));
+            assertClusterInfoSuccess(remoteB, 1);
 
             EsqlExecutionInfo.Cluster local = executionInfo.getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
-            assertThat(local.getTook().millis(), greaterThanOrEqualTo(0L));
-            assertThat(local.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-            assertThat(local.getTotalShards(), equalTo(localNumShards));
-            assertThat(local.getSuccessfulShards(), equalTo(localNumShards));
-            assertThat(local.getSkippedShards(), equalTo(0));
-            assertThat(local.getFailedShards(), equalTo(0));
-            assertThat(local.getFailures().size(), equalTo(0));
+            assertClusterInfoSuccess(local, localNumShards);
+
+            // Check that stop produces the same result
+            try (
+                EsqlQueryResponse stopResponse = client().execute(EsqlAsyncStopAction.INSTANCE, new AsyncStopRequest(asyncExecutionId))
+                    .get()
+            ) {
+                assertThat(stopResponse, equalTo(asyncResponse));
+            }
         } finally {
-            AcknowledgedResponse acknowledgedResponse = deleteAsyncId(asyncExecutionId.get());
-            assertThat(acknowledgedResponse.isAcknowledged(), is(true));
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
@@ -231,7 +217,7 @@ public void testAsyncQueriesWithLimit0() throws IOException {
 
         final TimeValue waitForCompletion = TimeValue.timeValueNanos(randomFrom(1L, Long.MAX_VALUE));
         String asyncExecutionId = null;
-        try (EsqlQueryResponse resp = runAsyncQuery("FROM logs*,*:logs* | LIMIT 0", requestIncludeMeta, null, waitForCompletion)) {
+        try (EsqlQueryResponse resp = runAsyncQuery(client(), "FROM logs*,*:logs* | LIMIT 0", requestIncludeMeta, waitForCompletion)) {
             EsqlExecutionInfo executionInfo = resp.getExecutionInfo();
             if (resp.isRunning()) {
                 asyncExecutionId = resp.asyncExecutionId().get();
@@ -252,88 +238,294 @@ public void testAsyncQueriesWithLimit0() throws IOException {
                 assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
                 assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta));
                 assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+                assertThat(executionInfo.isPartial(), equalTo(false));
 
                 EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
-                assertThat(remoteCluster.getIndexExpression(), equalTo("logs*"));
-                assertThat(remoteCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-                assertThat(remoteCluster.getTook().millis(), greaterThanOrEqualTo(0L));
                 assertThat(remoteCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
-                assertThat(remoteCluster.getTotalShards(), equalTo(0));
-                assertThat(remoteCluster.getSuccessfulShards(), equalTo(0));
-                assertThat(remoteCluster.getSkippedShards(), equalTo(0));
-                assertThat(remoteCluster.getFailedShards(), equalTo(0));
+                assertThat(remoteCluster.getIndexExpression(), equalTo("logs*"));
+                assertClusterInfoSuccess(remoteCluster, 0);
 
-                EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+                EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
+                assertClusterInfoSuccess(remote2Cluster, 0);
                 assertThat(remote2Cluster.getIndexExpression(), equalTo("logs*"));
-                assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-                assertThat(remote2Cluster.getTook().millis(), greaterThanOrEqualTo(0L));
                 assertThat(remote2Cluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
-                assertThat(remote2Cluster.getTotalShards(), equalTo(0));
-                assertThat(remote2Cluster.getSuccessfulShards(), equalTo(0));
-                assertThat(remote2Cluster.getSkippedShards(), equalTo(0));
-                assertThat(remote2Cluster.getFailedShards(), equalTo(0));
 
                 EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+                assertClusterInfoSuccess(localCluster, 0);
                 assertThat(localCluster.getIndexExpression(), equalTo("logs*"));
-                assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
-                assertThat(localCluster.getTook().millis(), greaterThanOrEqualTo(0L));
                 assertThat(localCluster.getTook().millis(), lessThanOrEqualTo(overallTookMillis));
-                assertThat(remote2Cluster.getTotalShards(), equalTo(0));
-                assertThat(remote2Cluster.getSuccessfulShards(), equalTo(0));
-                assertThat(remote2Cluster.getSkippedShards(), equalTo(0));
-                assertThat(remote2Cluster.getFailedShards(), equalTo(0));
 
                 assertClusterMetadataInResponse(resp, responseExpectMeta, 3);
             }
         } finally {
             if (asyncExecutionId != null) {
-                AcknowledgedResponse acknowledgedResponse = deleteAsyncId(asyncExecutionId);
-                assertThat(acknowledgedResponse.isAcknowledged(), is(true));
+                assertAcked(deleteAsyncId(client(), asyncExecutionId));
             }
         }
     }
 
-    protected EsqlQueryResponse runAsyncQuery(String query, Boolean ccsMetadata, QueryBuilder filter, TimeValue waitCompletionTime) {
-        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
-        request.query(query);
-        request.pragmas(AbstractEsqlIntegTestCase.randomPragmas());
-        request.profile(randomInt(5) == 2);
-        request.columnar(randomBoolean());
-        if (ccsMetadata != null) {
-            request.includeCCSMetadata(ccsMetadata);
+    public void testStopQuery() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(3);
+        int localNumShards = (Integer) testClusterInfo.get("local.num_shards");
+        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
+        // Create large index so we could be sure we're stopping before the end
+        populateRuntimeIndex(REMOTE_CLUSTER_2, "pause_count", INDEX_WITH_RUNTIME_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final String asyncExecutionId = startAsyncQueryWithPragmas(
+            client(),
+            "FROM logs-*,cluster-a:logs-*,remote-b:blocking | STATS total=sum(coalesce(const,v)) | LIMIT 1",
+            includeCCSMetadata.v1(),
+            Map.of("page_size", 1, "data_partitioning", "shard", "task_concurrency", 1)
+        );
+
+        // wait until we know that the query against 'remote-b:blocking' has started
+        CountingPauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until the query of 'cluster-a:logs-*' has finished (it is not blocked since we are not searching the 'blocking' index on it)
+        waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId);
+        waitForCluster(client(), LOCAL_CLUSTER, asyncExecutionId);
+
+        /* at this point:
+         *  the query against cluster-a should be finished
+         *  the query against remote-b should be running (blocked on the PauseFieldPlugin.allowEmitting CountDown)
+         *  the query against the local cluster should be running because it has a STATS clause that needs to wait on remote-b
+         */
+
+        // run the stop query
+        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId);
+        ActionFuture<EsqlQueryResponse> stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        assertBusy(() -> {
+            List<TaskInfo> tasks = getDriverTasks(client(REMOTE_CLUSTER_2));
+            List<TaskInfo> reduceTasks = tasks.stream().filter(t -> t.description().contains("_LuceneSourceOperator") == false).toList();
+            assertThat(reduceTasks, empty());
+        });
+        // allow remoteB query to proceed
+        CountingPauseFieldPlugin.allowEmitting.countDown();
+
+        // Since part of the query has not been stopped, we expect some result to emerge here
+        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            // Check that we did not process all the fields on remote-b
+            // Should not be getting more than one page here, and we set page size to 1
+            assertThat(CountingPauseFieldPlugin.count.get(), lessThanOrEqualTo(1L));
+            assertThat(asyncResponse.isRunning(), is(false));
+            assertThat(asyncResponse.columns().size(), equalTo(1));
+            assertThat(asyncResponse.values().hasNext(), is(true));
+            Iterator<Object> row = asyncResponse.values().next();
+            // sum of 0-9 is 45, and sum of 0-9 squared is 285
+            assertThat(row.next(), equalTo(330L));
+
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.isCrossClusterSearch(), is(true));
+            long overallTookMillis = executionInfo.overallTook().millis();
+            assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
+            assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+            assertThat(executionInfo.isPartial(), equalTo(true));
+
+            EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+            assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(remoteCluster, remote1NumShards);
+
+            EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
+            assertThat(remote2Cluster.getIndexExpression(), equalTo("blocking"));
+            assertThat(remote2Cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
+
+            EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+            assertThat(localCluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(localCluster, localNumShards);
+
+            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
-        request.waitForCompletionTimeout(waitCompletionTime);
-        request.keepOnCompletion(false);
-        if (filter != null) {
-            request.filter(filter);
+    }
+
+    public void testStopQueryLocal() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(3);
+        int remote1NumShards = (Integer) testClusterInfo.get("remote1.num_shards");
+        int remote2NumShards = (Integer) testClusterInfo.get("remote2.num_shards");
+        populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_RUNTIME_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM blocking,*:logs-* | STATS total=sum(coalesce(const,v)) | LIMIT 1",
+            includeCCSMetadata.v1()
+        );
+
+        // wait until we know that the query against 'remote-b:blocking' has started
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        // wait until the remotes are done
+        waitForCluster(client(), REMOTE_CLUSTER_1, asyncExecutionId);
+        waitForCluster(client(), REMOTE_CLUSTER_2, asyncExecutionId);
+
+        /* at this point:
+         *  the query against remotes should be finished
+         *  the query against the local cluster should be running because it's blocked
+         */
+
+        // run the stop query
+        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId);
+        ActionFuture<EsqlQueryResponse> stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // ensure stop operation is running
+        assertBusy(() -> {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client(), asyncExecutionId)) {
+                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+                assertNotNull(executionInfo);
+                assertThat(executionInfo.isPartial(), is(true));
+            }
+        });
+        // allow local query to proceed
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+
+        // Since part of the query has not been stopped, we expect some result to emerge here
+        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            assertThat(asyncResponse.isRunning(), is(false));
+            assertThat(asyncResponse.columns().size(), equalTo(1));
+            assertThat(asyncResponse.values().hasNext(), is(true));
+            Iterator<Object> row = asyncResponse.values().next();
+            // sum of 0-9 squared is 285, from two remotes it's 570
+            assertThat(row.next(), equalTo(570L));
+
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.isCrossClusterSearch(), is(true));
+            long overallTookMillis = executionInfo.overallTook().millis();
+            assertThat(overallTookMillis, greaterThanOrEqualTo(0L));
+            assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER, REMOTE_CLUSTER_1, REMOTE_CLUSTER_2)));
+            assertThat(executionInfo.isPartial(), equalTo(true));
+
+            EsqlExecutionInfo.Cluster remoteCluster = executionInfo.getCluster(REMOTE_CLUSTER_1);
+            assertThat(remoteCluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(remoteCluster, remote1NumShards);
+
+            EsqlExecutionInfo.Cluster remote2Cluster = executionInfo.getCluster(REMOTE_CLUSTER_2);
+            assertThat(remote2Cluster.getIndexExpression(), equalTo("logs-*"));
+            assertClusterInfoSuccess(remote2Cluster, remote2NumShards);
+
+            EsqlExecutionInfo.Cluster localCluster = executionInfo.getCluster(LOCAL_CLUSTER);
+            assertThat(localCluster.getIndexExpression(), equalTo("blocking"));
+            assertThat(localCluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.PARTIAL));
+
+            assertClusterMetadataInResponse(asyncResponse, responseExpectMeta, 3);
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
-        return runAsyncQuery(request);
     }
 
-    protected EsqlQueryResponse runAsyncQuery(EsqlQueryRequest request) {
-        try {
-            return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
-        } catch (ElasticsearchTimeoutException e) {
-            throw new AssertionError("timeout waiting for query response", e);
+    public void testStopQueryLocalNoRemotes() throws Exception {
+        setupClusters(3);
+        populateRuntimeIndex(LOCAL_CLUSTER, "pause", INDEX_WITH_RUNTIME_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        boolean responseExpectMeta = includeCCSMetadata.v2();
+
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM blocking | STATS total=count(const) | LIMIT 1",
+            includeCCSMetadata.v1()
+        );
+
+        // wait until we know that the query against the local 'blocking' index has started
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+
+        /* at this point:
+         *  the query against the local cluster should be running because it's blocked
+         */
+
+        // run the stop query
+        var stopRequest = new AsyncStopRequest(asyncExecutionId);
+        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // allow local query to proceed
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+
+        try (EsqlQueryResponse asyncResponse = stopAction.actionGet(30, TimeUnit.SECONDS)) {
+            assertThat(asyncResponse.isRunning(), is(false));
+            assertThat(asyncResponse.columns().size(), equalTo(1));
+            assertThat(asyncResponse.values().hasNext(), is(true));
+            Iterator<Object> row = asyncResponse.values().next();
+            assertThat((long) row.next(), greaterThanOrEqualTo(0L));
+
+            EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+            assertNotNull(executionInfo);
+            assertThat(executionInfo.isCrossClusterSearch(), is(false));
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
-    AcknowledgedResponse deleteAsyncId(String id) {
+    public void testAsyncFailure() throws Exception {
+        Map<String, Object> testClusterInfo = setupClusters(2);
+        populateRuntimeIndex(REMOTE_CLUSTER_1, "pause_fail", INDEX_WITH_FAIL_MAPPING);
+
+        Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
+        final String asyncExecutionId = startAsyncQuery(
+            client(),
+            "FROM logs-*,cluster-a:failing | STATS total=sum(const) | LIMIT 1",
+            includeCCSMetadata.v1()
+        );
+        // wait until we know that the query against remote has started
+        FailingPauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+        // Allow to proceed
+        FailingPauseFieldPlugin.allowEmitting.countDown();
+
+        // wait until local queries have finished
         try {
-            DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id);
-            return client().execute(TransportDeleteAsyncResultAction.TYPE, request).actionGet(30, TimeUnit.SECONDS);
-        } catch (ElasticsearchTimeoutException e) {
-            throw new AssertionError("timeout waiting for DELETE response", e);
+            assertBusy(() -> assertThrows(Exception.class, () -> getAsyncResponse(client(), asyncExecutionId)));
+            // Ensure stop query fails too when get fails
+            assertThrows(
+                ElasticsearchException.class,
+                () -> client().execute(EsqlAsyncStopAction.INSTANCE, new AsyncStopRequest(asyncExecutionId)).actionGet()
+            );
+        } finally {
+            assertAcked(deleteAsyncId(client(), asyncExecutionId));
         }
     }
 
-    EsqlQueryResponse getAsyncResponse(String id) {
-        try {
-            var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueMillis(1));
-            return client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
-        } catch (ElasticsearchTimeoutException e) {
-            throw new AssertionError("timeout waiting for GET async result", e);
+    private String randomAsyncId() {
+        return AsyncExecutionId.encode(randomAlphaOfLength(10), new TaskId(randomAlphaOfLength(10), randomLong()));
+    }
+
+    public void testBadAsyncId() throws Exception {
+        setupClusters(3);
+        final AtomicReference<String> asyncId = new AtomicReference<>();
+        try (
+            EsqlQueryResponse resp = runAsyncQuery(
+                client(),
+                "FROM logs-*,*:logs-* | STATS total=sum(const) | LIMIT 1",
+                randomBoolean(),
+                TimeValue.timeValueMillis(0)
+            )
+        ) {
+            assertTrue(resp.isRunning());
+            asyncId.set(resp.asyncExecutionId().get());
         }
+        assertBusy(() -> {
+            try (EsqlQueryResponse resp = getAsyncResponse(client(), asyncId.get())) {
+                assertFalse(resp.isRunning());
+            }
+        });
+
+        String randomAsyncIdasyncId = randomAsyncId();
+        var stopRequest = new AsyncStopRequest(randomAsyncIdasyncId);
+        var stopAction = client().execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        assertThrows(ResourceNotFoundException.class, () -> stopAction.actionGet(1000, TimeUnit.SECONDS));
+    }
+
+    private void assertClusterInfoSuccess(EsqlExecutionInfo.Cluster cluster, int numShards) {
+        assertThat(cluster.getTook().millis(), greaterThanOrEqualTo(0L));
+        assertThat(cluster.getStatus(), equalTo(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL));
+        assertThat(cluster.getTotalShards(), equalTo(numShards));
+        assertThat(cluster.getSuccessfulShards(), equalTo(numShards));
+        assertThat(cluster.getSkippedShards(), equalTo(0));
+        assertThat(cluster.getFailedShards(), equalTo(0));
+        assertThat(cluster.getFailures().size(), equalTo(0));
     }
 
     private static void assertClusterMetadataInResponse(EsqlQueryResponse resp, boolean responseExpectMeta, int numClusters) {
@@ -373,11 +565,8 @@ Map<String, Object> setupClusters(int numClusters) throws IOException {
         if (numClusters == 3) {
             int numShardsRemote2 = randomIntBetween(1, 5);
             populateRemoteIndices(REMOTE_CLUSTER_2, REMOTE_INDEX, numShardsRemote2);
-            populateRemoteIndicesWithRuntimeMapping(REMOTE_CLUSTER_2);
             clusterInfo.put("remote2.index", REMOTE_INDEX);
             clusterInfo.put("remote2.num_shards", numShardsRemote2);
-            clusterInfo.put("remote2.blocking_index", INDEX_WITH_RUNTIME_MAPPING);
-            clusterInfo.put("remote2.blocking_index.num_shards", 1);
         }
 
         String skipUnavailableKey = Strings.format("cluster.remote.%s.skip_unavailable", REMOTE_CLUSTER_1);
@@ -405,23 +594,26 @@ void populateLocalIndices(String indexName, int numShards) {
         localClient.admin().indices().prepareRefresh(indexName).get();
     }
 
-    void populateRemoteIndicesWithRuntimeMapping(String clusterAlias) throws IOException {
+    void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
+        populateRuntimeIndex(clusterAlias, langName, indexName, 10);
+    }
+
+    void populateRuntimeIndex(String clusterAlias, String langName, String indexName, int count) throws IOException {
         XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
         mapping.startObject("runtime");
         {
             mapping.startObject("const");
             {
                 mapping.field("type", "long");
-                mapping.startObject("script").field("source", "").field("lang", "pause").endObject();
+                mapping.startObject("script").field("source", "").field("lang", langName).endObject();
             }
             mapping.endObject();
         }
         mapping.endObject();
         mapping.endObject();
-        client(clusterAlias).admin().indices().prepareCreate(INDEX_WITH_RUNTIME_MAPPING).setMapping(mapping).get();
-        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(INDEX_WITH_RUNTIME_MAPPING)
-            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-        for (int i = 0; i < 10; i++) {
+        client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
+        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+        for (int i = 0; i < count; i++) {
             bulk.add(new IndexRequest().source("foo", i));
         }
         bulk.get();
@@ -441,4 +633,26 @@ void populateRemoteIndices(String clusterAlias, String indexName, int numShards)
         }
         remoteClient.admin().indices().prepareRefresh(indexName).get();
     }
+
+    public static class CountingPauseFieldPlugin extends SimplePauseFieldPlugin {
+        public static AtomicLong count = new AtomicLong(0);
+
+        protected String scriptTypeName() {
+            return "pause_count";
+        }
+
+        public static void resetPlugin() {
+            count.set(0);
+        }
+
+        @Override
+        public boolean onWait() throws InterruptedException {
+            count.incrementAndGet();
+            return allowEmitting.await(30, TimeUnit.SECONDS);
+        }
+    }
+
+    private static List<TaskInfo> getDriverTasks(Client client) {
+        return client.admin().cluster().prepareListTasks().setActions(DriverTaskRunner.ACTION_NAME).setDetailed(true).get().getTasks();
+    }
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java
index cd30ab02676fc..89f7fdca79135 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersUsageTelemetryIT.java
@@ -7,29 +7,51 @@
 
 package org.elasticsearch.xpack.esql.action;
 
+import org.elasticsearch.action.ActionFuture;
 import org.elasticsearch.action.admin.cluster.stats.CCSTelemetrySnapshot;
 import org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.SkipUnavailableRule;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.core.async.AsyncStopRequest;
+import org.junit.Before;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry.ASYNC_FEATURE;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
+import static org.elasticsearch.xpack.esql.action.EsqlAsyncTestUtils.deleteAsyncId;
 import static org.hamcrest.Matchers.equalTo;
 
 public class CrossClustersUsageTelemetryIT extends AbstractCrossClustersUsageTelemetryIT {
+    private static final String INDEX_WITH_RUNTIME_MAPPING = "blocking";
 
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
         List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
         plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class);
         plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class);
+        plugins.add(SimplePauseFieldPlugin.class);
+        plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action
         return plugins;
     }
 
+    @Before
+    public void resetPlugin() {
+        SimplePauseFieldPlugin.resetPlugin();
+    }
+
     public void assertPerClusterCount(CCSTelemetrySnapshot.PerClusterCCSTelemetry perCluster, long count) {
         assertThat(perCluster.getCount(), equalTo(count));
         assertThat(perCluster.getSkippedCount(), equalTo(0L));
@@ -202,6 +224,59 @@ public void testAsync() throws Exception {
         assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 2L);
     }
 
+    public void testAsyncStop() throws Exception {
+        setupClusters();
+        populateRuntimeIndex(REMOTE1, "pause", INDEX_WITH_RUNTIME_MAPPING);
+        populateRuntimeIndex(REMOTE2, "pause", INDEX_WITH_RUNTIME_MAPPING);
+
+        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
+        request.query("from logs-*,c*:logs-*,c*:blocking | eval v1=coalesce(const, v) | stats sum (v1)");
+        request.pragmas(AbstractEsqlIntegTestCase.randomPragmas());
+        request.columnar(randomBoolean());
+        request.includeCCSMetadata(randomBoolean());
+
+        AtomicReference<String> asyncExecutionId = new AtomicReference<>();
+        assertResponse(cluster(LOCAL_CLUSTER).client(queryNode).execute(EsqlQueryAction.INSTANCE, request), resp -> {
+            if (resp.isRunning()) {
+                assertNotNull("async execution id is null", resp.asyncExecutionId());
+                asyncExecutionId.set(resp.asyncExecutionId().get());
+            }
+        });
+        SimplePauseFieldPlugin.startEmitting.await(30, TimeUnit.SECONDS);
+        AsyncStopRequest stopRequest = new AsyncStopRequest(asyncExecutionId.get());
+        ActionFuture<EsqlQueryResponse> actionFuture = cluster(LOCAL_CLUSTER).client(queryNode)
+            .execute(EsqlAsyncStopAction.INSTANCE, stopRequest);
+        // Release the pause
+        SimplePauseFieldPlugin.allowEmitting.countDown();
+        try (EsqlQueryResponse resp = actionFuture.actionGet(30, TimeUnit.SECONDS)) {
+            assertTrue(resp.getExecutionInfo().isPartial());
+
+            CCSTelemetrySnapshot telemetry = getTelemetrySnapshot(queryNode);
+
+            assertThat(telemetry.getTotalCount(), equalTo(1L));
+            assertThat(telemetry.getSuccessCount(), equalTo(1L));
+            assertThat(telemetry.getFailureReasons().size(), equalTo(0));
+            assertThat(telemetry.getTook().count(), equalTo(1L));
+            assertThat(telemetry.getTookMrtFalse().count(), equalTo(0L));
+            assertThat(telemetry.getTookMrtTrue().count(), equalTo(0L));
+            assertThat(telemetry.getRemotesPerSearchAvg(), equalTo(2.0));
+            assertThat(telemetry.getRemotesPerSearchMax(), equalTo(2L));
+            assertThat(telemetry.getSearchCountWithSkippedRemotes(), equalTo(0L));
+            assertThat(telemetry.getClientCounts().size(), equalTo(0));
+            assertThat(telemetry.getFeatureCounts().get(ASYNC_FEATURE), equalTo(1L));
+
+            var perCluster = telemetry.getByRemoteCluster();
+            assertThat(perCluster.size(), equalTo(3));
+            for (String clusterAlias : remoteClusterAlias()) {
+                assertPerClusterCount(perCluster.get(clusterAlias), 1L);
+            }
+            assertPerClusterCount(perCluster.get(LOCAL_CLUSTER), 1L);
+        } finally {
+            // Clean up
+            assertAcked(deleteAsyncId(client(), asyncExecutionId.get()));
+        }
+    }
+
     public void testNoSuchCluster() throws Exception {
         setupClusters();
         // This is not recognized as a cross-cluster search
@@ -225,4 +300,25 @@ public void testDisconnect() throws Exception {
         assertThat(telemetry.getFailureReasons(), equalTo(expectedFailure));
     }
 
+    void populateRuntimeIndex(String clusterAlias, String langName, String indexName) throws IOException {
+        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("runtime");
+        {
+            mapping.startObject("const");
+            {
+                mapping.field("type", "long");
+                mapping.startObject("script").field("source", "").field("lang", langName).endObject();
+            }
+            mapping.endObject();
+        }
+        mapping.endObject();
+        mapping.endObject();
+        client(clusterAlias).admin().indices().prepareCreate(indexName).setMapping(mapping).get();
+        BulkRequestBuilder bulk = client(clusterAlias).prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+        for (int i = 0; i < 10; i++) {
+            bulk.add(new IndexRequest().source("foo", i));
+        }
+        bulk.get();
+    }
+
 }
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
index 8e27cfceb28e6..2d0a15436bf82 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java
@@ -362,7 +362,8 @@ private void assertCancelled(ActionFuture response) throws Ex
                     "task cancelled",
                     "request cancelled test cancel",
                     "parent task was cancelled [test cancel]",
-                    "cancelled on failure"
+                    "cancelled on failure",
+                    "task cancelled [cancelled on failure]"
                 )
             )
         );
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java
new file mode 100644
index 0000000000000..d7117fb5e0750
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncTestUtils.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.transport.RemoteClusterAware;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction;
+import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.core.TimeValue.timeValueMillis;
+import static org.elasticsearch.test.ESTestCase.assertBusy;
+import static org.elasticsearch.test.ESTestCase.assertThat;
+import static org.elasticsearch.test.ESTestCase.randomBoolean;
+import static org.elasticsearch.test.ESTestCase.randomInt;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public final class EsqlAsyncTestUtils {
+    public static String startAsyncQuery(Client client, String q, Boolean includeCCSMetadata) {
+        return startAsyncQueryWithPragmas(client, q, includeCCSMetadata, null);
+    }
+
+    public static String startAsyncQueryWithPragmas(Client client, String q, Boolean includeCCSMetadata, Map<String, Object> pragmas) {
+        try (EsqlQueryResponse resp = runAsyncQuery(client, q, includeCCSMetadata, TimeValue.timeValueMillis(100), pragmas)) {
+            assertTrue(resp.isRunning());
+            assertNotNull("async execution id is null", resp.asyncExecutionId());
+            // executionInfo may or may not be set on the initial response when there is a relatively low wait_for_completion_timeout
+            // so we do not check for it here
+            return resp.asyncExecutionId().get();
+        }
+    }
+
+    public static EsqlQueryResponse runAsyncQuery(Client client, String query, Boolean ccsMetadata, TimeValue waitCompletionTime) {
+        return runAsyncQuery(client, query, ccsMetadata, waitCompletionTime, null);
+    }
+
+    private static QueryPragmas randomPragmasWithOverride(@Nullable Map<String, Object> pragmas) {
+        if (pragmas == null || pragmas.isEmpty()) {
+            return AbstractEsqlIntegTestCase.randomPragmas();
+        }
+        Settings.Builder settings = Settings.builder();
+        settings.put(AbstractEsqlIntegTestCase.randomPragmas().getSettings());
+        settings.loadFromMap(pragmas);
+        return new QueryPragmas(settings.build());
+    }
+
+    public static EsqlQueryResponse runAsyncQuery(
+        Client client,
+        String query,
+        Boolean ccsMetadata,
+        TimeValue waitCompletionTime,
+        @Nullable Map<String, Object> pragmas
+    ) {
+        EsqlQueryRequest request = EsqlQueryRequest.asyncEsqlQueryRequest();
+        request.query(query);
+        request.pragmas(randomPragmasWithOverride(pragmas));
+        request.profile(randomInt(5) == 2);
+        request.columnar(randomBoolean());
+        if (ccsMetadata != null) {
+            request.includeCCSMetadata(ccsMetadata);
+        }
+        request.waitForCompletionTimeout(waitCompletionTime);
+        request.keepOnCompletion(true);
+        return runAsyncQuery(client, request);
+    }
+
+    /**
+     * Wait for the cluster to finish running the query.
+     */
+    public static void waitForCluster(Client client, String clusterName, String asyncExecutionId) throws Exception {
+        assertBusy(() -> {
+            try (EsqlQueryResponse asyncResponse = getAsyncResponse(client, asyncExecutionId)) {
+                EsqlExecutionInfo executionInfo = asyncResponse.getExecutionInfo();
+                assertNotNull(executionInfo);
+                EsqlExecutionInfo.Cluster clusterInfo = executionInfo.getCluster(clusterName);
+                // the status of the local cluster won't move to SUCCESS until the reduction pipeline is done
+                if (RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY.equals(clusterName)
+                    && Objects.requireNonNullElse(clusterInfo.getTotalShards(), 0) > 0) {
+                    return;
+                }
+                assertThat(clusterInfo.getStatus(), not(equalTo(EsqlExecutionInfo.Cluster.Status.RUNNING)));
+            }
+        });
+    }
+
+    public static EsqlQueryResponse runAsyncQuery(Client client, EsqlQueryRequest request) {
+        try {
+            return client.execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for query response", e);
+        }
+    }
+
+    public static AcknowledgedResponse deleteAsyncId(Client client, String id) {
+        try {
+            DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id);
+            return client.execute(TransportDeleteAsyncResultAction.TYPE, request).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for DELETE response", e);
+        }
+    }
+
+    public static EsqlQueryResponse getAsyncResponse(Client client, String id) {
+        try {
+            var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueMillis(1));
+            return client.execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS);
+        } catch (ElasticsearchTimeoutException e) {
+            throw new AssertionError("timeout waiting for GET async result", e);
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java
new file mode 100644
index 0000000000000..010931432e2e8
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/FailingPauseFieldPlugin.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.ElasticsearchException;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A plugin that provides a script language "pause_fail" that can be used to make queries fail in a predictable way.
+ */
+public class FailingPauseFieldPlugin extends AbstractPauseFieldPlugin {
+    public static CountDownLatch startEmitting = new CountDownLatch(1);
+    public static CountDownLatch allowEmitting = new CountDownLatch(1);
+
+    @Override
+    protected String scriptTypeName() {
+        return "pause_fail";
+    }
+
+    public static void resetPlugin() {
+        allowEmitting = new CountDownLatch(1);
+        startEmitting = new CountDownLatch(1);
+    }
+
+    @Override
+    public void onStartExecute() {
+        startEmitting.countDown();
+    }
+
+    @Override
+    public boolean onWait() throws InterruptedException {
+        allowEmitting.await(30, TimeUnit.SECONDS);
+        throw new ElasticsearchException("Failing query");
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java
new file mode 100644
index 0000000000000..97b4cba0d9938
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBooleanEvaluator.java
@@ -0,0 +1,225 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.nulls;
+
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.DriverContext;
+import org.elasticsearch.compute.operator.EvalOperator;
+import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.Releasables;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper;
+
+import java.util.List;
+import java.util.stream.IntStream;
+
+/**
+ * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}.
+ * This class is generated. Edit {@code X-InEvaluator.java.st} instead.
+ */
+abstract sealed class CoalesceBooleanEvaluator implements EvalOperator.ExpressionEvaluator permits
+    CoalesceBooleanEvaluator.CoalesceBooleanEagerEvaluator, //
+    CoalesceBooleanEvaluator.CoalesceBooleanLazyEvaluator {
+
+    static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List<Expression> children) {
+        List<ExpressionEvaluator.Factory> childEvaluators = children.stream().map(toEvaluator::apply).toList();
+        if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) {
+            return new ExpressionEvaluator.Factory() {
+                @Override
+                public ExpressionEvaluator get(DriverContext context) {
+                    return new CoalesceBooleanEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList());
+                }
+
+                @Override
+                public String toString() {
+                    return "CoalesceBooleanEagerEvaluator[values=" + childEvaluators + ']';
+                }
+            };
+        }
+        return new ExpressionEvaluator.Factory() {
+            @Override
+            public ExpressionEvaluator get(DriverContext context) {
+                return new CoalesceBooleanLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList());
+            }
+
+            @Override
+            public String toString() {
+                return "CoalesceBooleanLazyEvaluator[values=" + childEvaluators + ']';
+            }
+        };
+    }
+
+    protected final DriverContext driverContext;
+    protected final List<ExpressionEvaluator> evaluators;
+
+    protected CoalesceBooleanEvaluator(DriverContext driverContext, List<ExpressionEvaluator> evaluators) {
+        this.driverContext = driverContext;
+        this.evaluators = evaluators;
+    }
+
+    @Override
+    public final BooleanBlock eval(Page page) {
+        return entireBlock(page);
+    }
+
+    /**
+     * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to
+     * {@link #perPosition} evaluation.
+     * 

+ * Entire Block evaluation is the "normal" way to run the compute engine, + * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try + * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and: + *

+ *
    + *
  • If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.
  • + *
  • If the {@linkplain Block} is only nulls we skip it and try the next evaluator.
  • + *
  • If this is the last evaluator we just return it. COALESCE done.
  • + *
  • + * Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop + * into a per position evaluator. + *
  • + *
+ */ + private BooleanBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + BooleanBlock lastFullBlock = (BooleanBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+ * This must not return warnings caused by + * evaluating positions for which a previous evaluator returned + * non-null. These are positions that, at least from the perspective + * of a compute engine user, don't have to be + * evaluated. Put another way, this must function as though + * {@code COALESCE} were per-position lazy. It can manage that + * any way it likes. + *

+ */ + protected abstract BooleanBlock perPosition(Page page, BooleanBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceBooleanLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceBooleanEagerEvaluator extends CoalesceBooleanEvaluator { + CoalesceBooleanEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BooleanBlock perPosition(Page page, BooleanBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + BooleanBlock[] flatten = new BooleanBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (BooleanBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (BooleanBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
    + *
  • Take the non-null values from the {@code lastFullBlock}
  • + *
  • + * Evaluate the remaining evaluators one at a time, keeping + * the first non-null value. + *
  • + *
+ */ + static final class CoalesceBooleanLazyEvaluator extends CoalesceBooleanEvaluator { + CoalesceBooleanLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BooleanBlock perPosition(Page page, BooleanBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (BooleanBlock block = (BooleanBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java new file mode 100644 index 0000000000000..7d6834e765a96 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceBytesRefEvaluator.java @@ -0,0 +1,228 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + */ +abstract sealed class CoalesceBytesRefEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceBytesRefEvaluator.CoalesceBytesRefEagerEvaluator, // + CoalesceBytesRefEvaluator.CoalesceBytesRefLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceBytesRefEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceBytesRefEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceBytesRefLazyEvaluator(context, childEvaluators.stream().map(x -> 
x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceBytesRefLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceBytesRefEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final BytesRefBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *

+ * Entire Block evaluation is the "normal" way to run the compute engine, + * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try + * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and: + *

+ *
    + *
  • If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.
  • + *
  • If the {@linkplain Block} is only nulls we skip it and try the next evaluator.
  • + *
  • If this is the last evaluator we just return it. COALESCE done.
  • + *
  • + * Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop + * into a per position evaluator. + *
  • + *
+ */ + private BytesRefBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + BytesRefBlock lastFullBlock = (BytesRefBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+ * This must not return warnings caused by + * evaluating positions for which a previous evaluator returned + * non-null. These are positions that, at least from the perspective + * of a compute engine user, don't have to be + * evaluated. Put another way, this must function as though + * {@code COALESCE} were per-position lazy. It can manage that + * any way it likes. + *

+ */ + protected abstract BytesRefBlock perPosition(Page page, BytesRefBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceBytesRefLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceBytesRefEagerEvaluator extends CoalesceBytesRefEvaluator { + CoalesceBytesRefEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BytesRefBlock perPosition(Page page, BytesRefBlock lastFullBlock, int firstToEvaluate) { + BytesRef scratch = new BytesRef(); + int positionCount = page.getPositionCount(); + BytesRefBlock[] flatten = new BytesRefBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (BytesRefBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (BytesRefBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p, scratch); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation 
fails. + * For each position we either: + *
    + *
  • Take the non-null values from the {@code lastFullBlock}
  • + *
  • + * Evaluate the remaining evaluators one at a time, keeping + * the first non-null value. + *
  • + *
+ */ + static final class CoalesceBytesRefLazyEvaluator extends CoalesceBytesRefEvaluator { + CoalesceBytesRefLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected BytesRefBlock perPosition(Page page, BytesRefBlock lastFullBlock, int firstToEvaluate) { + BytesRef scratch = new BytesRef(); + int positionCount = page.getPositionCount(); + try (BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (BytesRefBlock block = (BytesRefBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0, scratch); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java new file mode 100644 index 0000000000000..4c01a961ecbee --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceDoubleEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + */ +abstract sealed class CoalesceDoubleEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceDoubleEvaluator.CoalesceDoubleEagerEvaluator, // + CoalesceDoubleEvaluator.CoalesceDoubleLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceDoubleEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceDoubleEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new 
CoalesceDoubleLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceDoubleLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceDoubleEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final DoubleBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *

+ * Entire Block evaluation is the "normal" way to run the compute engine, + * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try + * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and: + *

+ *
    + *
  • If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.
  • + *
  • If the {@linkplain Block} is only nulls we skip it and try the next evaluator.
  • + *
  • If this is the last evaluator we just return it. COALESCE done.
  • + *
  • + * Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop + * into a per position evaluator. + *
  • + *
+ */ + private DoubleBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + DoubleBlock lastFullBlock = (DoubleBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+ * This must not return warnings caused by + * evaluating positions for which a previous evaluator returned + * non-null. These are positions that, at least from the perspective + * of a compute engine user, don't have to be + * evaluated. Put another way, this must function as though + * {@code COALESCE} were per-position lazy. It can manage that + * any way it likes. + *

+ */ + protected abstract DoubleBlock perPosition(Page page, DoubleBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceDoubleLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceDoubleEagerEvaluator extends CoalesceDoubleEvaluator { + CoalesceDoubleEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected DoubleBlock perPosition(Page page, DoubleBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + DoubleBlock[] flatten = new DoubleBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (DoubleBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (DoubleBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
    + *
  • Take the non-null values from the {@code lastFullBlock}
  • + *
  • + * Evaluate the remaining evaluators one at a time, keeping + * the first non-null value. + *
  • + *
+ */ + static final class CoalesceDoubleLazyEvaluator extends CoalesceDoubleEvaluator { + CoalesceDoubleLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected DoubleBlock perPosition(Page page, DoubleBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (DoubleBlock block = (DoubleBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java new file mode 100644 index 0000000000000..e90bd4b8e5e35 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceIntEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + */ +abstract sealed class CoalesceIntEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceIntEvaluator.CoalesceIntEagerEvaluator, // + CoalesceIntEvaluator.CoalesceIntLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceIntEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceIntEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceIntLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return 
"CoalesceIntLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceIntEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final IntBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *

+ * Entire Block evaluation is the "normal" way to run the compute engine, + * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try + * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and: + *

+ *
    + *
  • If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.
  • + *
  • If the {@linkplain Block} is only nulls we skip it and try the next evaluator.
  • + *
  • If this is the last evaluator we just return it. COALESCE done.
  • + *
  • + * Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop + * into a per position evaluator. + *
  • + *
+ */ + private IntBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + IntBlock lastFullBlock = (IntBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+ * This must not return warnings caused by + * evaluating positions for which a previous evaluator returned + * non-null. These are positions that, at least from the perspective + * of a compute engine user, don't have to be + * evaluated. Put another way, this must function as though + * {@code COALESCE} were per-position lazy. It can manage that + * any way it likes. + *

+ */ + protected abstract IntBlock perPosition(Page page, IntBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceIntLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceIntEagerEvaluator extends CoalesceIntEvaluator { + CoalesceIntEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected IntBlock perPosition(Page page, IntBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + IntBlock[] flatten = new IntBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (IntBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (IntBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
    + *
  • Take the non-null values from the {@code lastFullBlock}
  • + *
  • + * Evaluate the remaining evaluators one at a time, keeping + * the first non-null value. + *
  • + *
+ */ + static final class CoalesceIntLazyEvaluator extends CoalesceIntEvaluator { + CoalesceIntLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected IntBlock perPosition(Page page, IntBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (IntBlock block = (IntBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java new file mode 100644 index 0000000000000..53a21ad1198f4 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated-src/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceLongEvaluator.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-InEvaluator.java.st} instead. + */ +abstract sealed class CoalesceLongEvaluator implements EvalOperator.ExpressionEvaluator permits + CoalesceLongEvaluator.CoalesceLongEagerEvaluator, // + CoalesceLongEvaluator.CoalesceLongLazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceLongEagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "CoalesceLongEagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceLongLazyEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return 
"CoalesceLongLazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected CoalesceLongEvaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final LongBlock eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *

+ * Entire Block evaluation is the "normal" way to run the compute engine, + * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try + * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and: + *

+ *
    + *
  • If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.
  • + *
  • If the {@linkplain Block} is only nulls we skip it and try the next evaluator.
  • + *
  • If this is the last evaluator we just return it. COALESCE done.
  • + *
  • + * Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop + * into a per position evaluator. + *
  • + *
+ */ + private LongBlock entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + LongBlock lastFullBlock = (LongBlock) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+     * <p>
+     *     This must not return warnings caused by
+     *     evaluating positions for which a previous evaluator returned
+     *     non-null. These are positions that, at least from the perspective
+     *     of a compute engine user, don't have to be
+     *     evaluated. Put another way, this must function as though
+     *     {@code COALESCE} were per-position lazy. It can manage that
+     *     any way it likes.
+     * </p>
+ */ + protected abstract LongBlock perPosition(Page page, LongBlock lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link CoalesceLongLazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class CoalesceLongEagerEvaluator extends CoalesceLongEvaluator { + CoalesceLongEagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected LongBlock perPosition(Page page, LongBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + LongBlock[] flatten = new LongBlock[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = (LongBlock) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try (LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for (LongBlock f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if entire-block evaluation fails. + * For each position we either: + *
+     * <ul>
+     *     <li>Take the non-null values from the {@code lastFullBlock}</li>
+     *     <li>
+     *         Evaluate the remaining evaluators one at a time, keeping
+     *         the first non-null value.
+     *     </li>
+     * </ul>
+ */ + static final class CoalesceLongLazyEvaluator extends CoalesceLongEvaluator { + CoalesceLongLazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected LongBlock perPosition(Page page, LongBlock lastFullBlock, int firstToEvaluate) { + int positionCount = page.getPositionCount(); + try (LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try (LongBlock block = (LongBlock) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java new file mode 100644 index 0000000000000..0ff047f9bd819 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. + */ +public final class DateDiffConstantMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantMillisEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + 
return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillis(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampVector, + LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + 
result.appendInt(DateDiff.processMillis(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffConstantMillisEvaluator get(DriverContext context) { + return new DateDiffConstantMillisEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java new file mode 100644 index 0000000000000..880531ca53707 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantMillisNanosEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator endTimestampNanos; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantMillisNanosEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestampMillis, + EvalOperator.ExpressionEvaluator endTimestampNanos, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampMillisBlock = (LongBlock) startTimestampMillis.eval(page)) { + try (LongBlock endTimestampNanosBlock = (LongBlock) endTimestampNanos.eval(page)) { + LongVector startTimestampMillisVector = startTimestampMillisBlock.asVector(); + if (startTimestampMillisVector == null) { + return eval(page.getPositionCount(), startTimestampMillisBlock, endTimestampNanosBlock); + } + LongVector endTimestampNanosVector = endTimestampNanosBlock.asVector(); + if (endTimestampNanosVector == null) { + return eval(page.getPositionCount(), startTimestampMillisBlock, endTimestampNanosBlock); + } + return eval(page.getPositionCount(), startTimestampMillisVector, endTimestampNanosVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampMillisBlock, + LongBlock endTimestampNanosBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(startTimestampMillisBlock.getValueCount(p) != 1) { + if (startTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.getValueCount(p) != 1) { + if (endTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillisNanos(this.datePartFieldUnit, startTimestampMillisBlock.getLong(startTimestampMillisBlock.getFirstValueIndex(p)), endTimestampNanosBlock.getLong(endTimestampNanosBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampMillisVector, + LongVector endTimestampNanosVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillisNanos(this.datePartFieldUnit, startTimestampMillisVector.getLong(p), endTimestampNanosVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantMillisNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestampMillis, endTimestampNanos); + 
} + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampNanos; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestampMillis, + EvalOperator.ExpressionEvaluator.Factory endTimestampNanos) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + } + + @Override + public DateDiffConstantMillisNanosEvaluator get(DriverContext context) { + return new DateDiffConstantMillisNanosEvaluator(source, datePartFieldUnit, startTimestampMillis.get(context), endTimestampNanos.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantMillisNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java new file mode 100644 index 0000000000000..99f7d1cb2e247 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffConstantNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantNanosEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), startTimestampVector, endTimestampVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value 
function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanos(this.datePartFieldUnit, startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector startTimestampVector, + LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanos(this.datePartFieldUnit, startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory 
implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffConstantNanosEvaluator get(DriverContext context) { + return new DateDiffConstantNanosEvaluator(source, datePartFieldUnit, startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantNanosEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java new file mode 100644 index 0000000000000..842930a040ed0 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffConstantNanosMillisEvaluator.java @@ -0,0 +1,168 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. + */ +public final class DateDiffConstantNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator endTimestampMillis; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffConstantNanosMillisEvaluator(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator startTimestampNanos, + EvalOperator.ExpressionEvaluator endTimestampMillis, DriverContext driverContext) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock startTimestampNanosBlock = (LongBlock) startTimestampNanos.eval(page)) { + try (LongBlock endTimestampMillisBlock = (LongBlock) endTimestampMillis.eval(page)) { + LongVector 
startTimestampNanosVector = startTimestampNanosBlock.asVector(); + if (startTimestampNanosVector == null) { + return eval(page.getPositionCount(), startTimestampNanosBlock, endTimestampMillisBlock); + } + LongVector endTimestampMillisVector = endTimestampMillisBlock.asVector(); + if (endTimestampMillisVector == null) { + return eval(page.getPositionCount(), startTimestampNanosBlock, endTimestampMillisBlock); + } + return eval(page.getPositionCount(), startTimestampNanosVector, endTimestampMillisVector); + } + } + } + + public IntBlock eval(int positionCount, LongBlock startTimestampNanosBlock, + LongBlock endTimestampMillisBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (startTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampNanosBlock.getValueCount(p) != 1) { + if (startTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.getValueCount(p) != 1) { + if (endTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanosMillis(this.datePartFieldUnit, startTimestampNanosBlock.getLong(startTimestampNanosBlock.getFirstValueIndex(p)), endTimestampMillisBlock.getLong(endTimestampMillisBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, LongVector 
startTimestampNanosVector, + LongVector endTimestampMillisVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanosMillis(this.datePartFieldUnit, startTimestampNanosVector.getLong(p), endTimestampMillisVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffConstantNanosMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(startTimestampNanos, endTimestampMillis); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final DateDiff.Part datePartFieldUnit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampMillis; + + public Factory(Source source, DateDiff.Part datePartFieldUnit, + EvalOperator.ExpressionEvaluator.Factory startTimestampNanos, + EvalOperator.ExpressionEvaluator.Factory endTimestampMillis) { + this.source = source; + this.datePartFieldUnit = datePartFieldUnit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + } + + @Override + public DateDiffConstantNanosMillisEvaluator get(DriverContext context) { + return new DateDiffConstantNanosMillisEvaluator(source, 
datePartFieldUnit, startTimestampNanos.get(context), endTimestampMillis.get(context), context); + } + + @Override + public String toString() { + return "DateDiffConstantNanosMillisEvaluator[" + "datePartFieldUnit=" + datePartFieldUnit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java new file mode 100644 index 0000000000000..a464d0c5cafc7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); 
+ position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillis(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampVector, LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillis(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch 
(IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffMillisEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffMillisEvaluator get(DriverContext context) { + return new DateDiffMillisEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffMillisEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java new file mode 100644 index 0000000000000..4586e2cb720fd --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffMillisNanosEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffMillisNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator endTimestampNanos; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffMillisNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestampMillis, + EvalOperator.ExpressionEvaluator endTimestampNanos, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampMillisBlock = (LongBlock) startTimestampMillis.eval(page)) { + try (LongBlock endTimestampNanosBlock = (LongBlock) endTimestampNanos.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampMillisBlock, endTimestampNanosBlock); + } + LongVector startTimestampMillisVector = startTimestampMillisBlock.asVector(); + if (startTimestampMillisVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampMillisBlock, endTimestampNanosBlock); + } + LongVector endTimestampNanosVector = endTimestampNanosBlock.asVector(); + if (endTimestampNanosVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampMillisBlock, endTimestampNanosBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampMillisVector, endTimestampNanosVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, + LongBlock startTimestampMillisBlock, LongBlock 
endTimestampNanosBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampMillisBlock.getValueCount(p) != 1) { + if (startTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampNanosBlock.getValueCount(p) != 1) { + if (endTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processMillisNanos(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampMillisBlock.getLong(startTimestampMillisBlock.getFirstValueIndex(p)), endTimestampNanosBlock.getLong(endTimestampNanosBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampMillisVector, LongVector endTimestampNanosVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = 
new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processMillisNanos(unitVector.getBytesRef(p, unitScratch), startTimestampMillisVector.getLong(p), endTimestampNanosVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffMillisNanosEvaluator[" + "unit=" + unit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestampMillis, endTimestampNanos); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampMillis; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampNanos; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestampMillis, + EvalOperator.ExpressionEvaluator.Factory endTimestampNanos) { + this.source = source; + this.unit = unit; + this.startTimestampMillis = startTimestampMillis; + this.endTimestampNanos = endTimestampNanos; + } + + @Override + public DateDiffMillisNanosEvaluator get(DriverContext context) { + return new DateDiffMillisNanosEvaluator(source, unit.get(context), startTimestampMillis.get(context), endTimestampNanos.get(context), context); + } + + @Override + public String toString() { + return 
"DateDiffMillisNanosEvaluator[" + "unit=" + unit + ", startTimestampMillis=" + startTimestampMillis + ", endTimestampNanos=" + endTimestampNanos + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java new file mode 100644 index 0000000000000..95a54c3a24ec5 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffNanosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestamp; + + private final EvalOperator.ExpressionEvaluator endTimestamp; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestamp, + EvalOperator.ExpressionEvaluator endTimestamp, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampBlock = (LongBlock) startTimestamp.eval(page)) { + try (LongBlock endTimestampBlock = (LongBlock) endTimestamp.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector startTimestampVector = startTimestampBlock.asVector(); + if (startTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + LongVector endTimestampVector = endTimestampBlock.asVector(); + if (endTimestampVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampBlock, endTimestampBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampVector, endTimestampVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, LongBlock startTimestampBlock, + LongBlock endTimestampBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + 
position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampBlock.getValueCount(p) != 1) { + if (startTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampBlock.getValueCount(p) != 1) { + if (endTimestampBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanos(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampBlock.getLong(startTimestampBlock.getFirstValueIndex(p)), endTimestampBlock.getLong(endTimestampBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampVector, LongVector endTimestampVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanos(unitVector.getBytesRef(p, unitScratch), startTimestampVector.getLong(p), endTimestampVector.getLong(p))); + } catch 
(IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffNanosEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestamp, endTimestamp); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestamp; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestamp; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestamp, + EvalOperator.ExpressionEvaluator.Factory endTimestamp) { + this.source = source; + this.unit = unit; + this.startTimestamp = startTimestamp; + this.endTimestamp = endTimestamp; + } + + @Override + public DateDiffNanosEvaluator get(DriverContext context) { + return new DateDiffNanosEvaluator(source, unit.get(context), startTimestamp.get(context), endTimestamp.get(context), context); + } + + @Override + public String toString() { + return "DateDiffNanosEvaluator[" + "unit=" + unit + ", startTimestamp=" + startTimestamp + ", endTimestamp=" + endTimestamp + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java new file mode 100644 index 0000000000000..a7694647aec54 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffNanosMillisEvaluator.java @@ -0,0 +1,190 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.InvalidArgumentException; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateDiff}. + * This class is generated. Do not edit it. 
+ */ +public final class DateDiffNanosMillisEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator unit; + + private final EvalOperator.ExpressionEvaluator startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator endTimestampMillis; + + private final DriverContext driverContext; + + private Warnings warnings; + + public DateDiffNanosMillisEvaluator(Source source, EvalOperator.ExpressionEvaluator unit, + EvalOperator.ExpressionEvaluator startTimestampNanos, + EvalOperator.ExpressionEvaluator endTimestampMillis, DriverContext driverContext) { + this.source = source; + this.unit = unit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock unitBlock = (BytesRefBlock) unit.eval(page)) { + try (LongBlock startTimestampNanosBlock = (LongBlock) startTimestampNanos.eval(page)) { + try (LongBlock endTimestampMillisBlock = (LongBlock) endTimestampMillis.eval(page)) { + BytesRefVector unitVector = unitBlock.asVector(); + if (unitVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampNanosBlock, endTimestampMillisBlock); + } + LongVector startTimestampNanosVector = startTimestampNanosBlock.asVector(); + if (startTimestampNanosVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampNanosBlock, endTimestampMillisBlock); + } + LongVector endTimestampMillisVector = endTimestampMillisBlock.asVector(); + if (endTimestampMillisVector == null) { + return eval(page.getPositionCount(), unitBlock, startTimestampNanosBlock, endTimestampMillisBlock); + } + return eval(page.getPositionCount(), unitVector, startTimestampNanosVector, endTimestampMillisVector); + } + } + } + } + + public IntBlock eval(int positionCount, BytesRefBlock unitBlock, + LongBlock startTimestampNanosBlock, LongBlock 
endTimestampMillisBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (unitBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (unitBlock.getValueCount(p) != 1) { + if (unitBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startTimestampNanosBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startTimestampNanosBlock.getValueCount(p) != 1) { + if (startTimestampNanosBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (endTimestampMillisBlock.getValueCount(p) != 1) { + if (endTimestampMillisBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(DateDiff.processNanosMillis(unitBlock.getBytesRef(unitBlock.getFirstValueIndex(p), unitScratch), startTimestampNanosBlock.getLong(startTimestampNanosBlock.getFirstValueIndex(p)), endTimestampMillisBlock.getLong(endTimestampMillisBlock.getFirstValueIndex(p)))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, BytesRefVector unitVector, + LongVector startTimestampNanosVector, LongVector endTimestampMillisVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + BytesRef unitScratch = 
new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(DateDiff.processNanosMillis(unitVector.getBytesRef(p, unitScratch), startTimestampNanosVector.getLong(p), endTimestampMillisVector.getLong(p))); + } catch (IllegalArgumentException | InvalidArgumentException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "DateDiffNanosMillisEvaluator[" + "unit=" + unit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(unit, startTimestampNanos, endTimestampMillis); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory unit; + + private final EvalOperator.ExpressionEvaluator.Factory startTimestampNanos; + + private final EvalOperator.ExpressionEvaluator.Factory endTimestampMillis; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory unit, + EvalOperator.ExpressionEvaluator.Factory startTimestampNanos, + EvalOperator.ExpressionEvaluator.Factory endTimestampMillis) { + this.source = source; + this.unit = unit; + this.startTimestampNanos = startTimestampNanos; + this.endTimestampMillis = endTimestampMillis; + } + + @Override + public DateDiffNanosMillisEvaluator get(DriverContext context) { + return new DateDiffNanosMillisEvaluator(source, unit.get(context), startTimestampNanos.get(context), endTimestampMillis.get(context), context); + } + + @Override + public String toString() { + return 
"DateDiffNanosMillisEvaluator[" + "unit=" + unit + ", startTimestampNanos=" + startTimestampNanos + ", endTimestampMillis=" + endTimestampMillis + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java new file mode 100644 index 0000000000000..2178d0bf1e9fd --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncStopAction.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.xpack.core.esql.EsqlAsyncActionNames; + +public class EsqlAsyncStopAction extends ActionType { + + public static final EsqlAsyncStopAction INSTANCE = new EsqlAsyncStopAction(); + + public static final String NAME = EsqlAsyncActionNames.ESQL_ASYNC_STOP_ACTION_NAME; + + private EsqlAsyncStopAction() { + super(NAME); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 08e0f0cf473ef..e4c591f8f6b19 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -503,7 +503,10 @@ public enum Cap { * Support running date format function on nanosecond dates */ DATE_NANOS_DATE_FORMAT(), - + /** + * support date diff function on date nanos type, and mixed nanos/millis + */ + DATE_NANOS_DATE_DIFF(), /** * DATE_PARSE supports reading timezones */ diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index 61c7135cef42d..c1e43a74c2273 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -19,6 +19,7 @@ import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.action.RestActions; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.xcontent.ParseField; @@ -57,6 +58,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable { public static final ParseField FAILED_FIELD = new ParseField("failed"); public static final ParseField DETAILS_FIELD = new ParseField("details"); public static final ParseField TOOK = new ParseField("took"); + public static final ParseField IS_PARTIAL_FIELD = new ParseField("is_partial"); // Map key is clusterAlias on the primary querying cluster of a CCS minimize_roundtrips=true query // The Map itself is immutable after construction - all Clusters will be accounted for at the start of the search. @@ -71,6 +73,7 @@ public class EsqlExecutionInfo implements ChunkedToXContentObject, Writeable { private final transient Predicate skipUnavailablePredicate; private final transient Long relativeStartNanos; // start time for an ESQL query for calculating took times private transient TimeValue planningTookTime; // time elapsed since start of query to calling ComputeService.execute + private volatile boolean isPartial; // Does this request have partial results? 
public EsqlExecutionInfo(boolean includeCCSMetadata) { this(Predicates.always(), includeCCSMetadata); // default all clusters to skip_unavailable=true @@ -113,6 +116,13 @@ public EsqlExecutionInfo(StreamInput in) throws IOException { } else { this.includeCCSMetadata = false; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_RESPONSE_PARTIAL)) { + this.isPartial = in.readBoolean(); + } else { + this.isPartial = false; + } + this.skipUnavailablePredicate = Predicates.always(); this.relativeStartNanos = null; } @@ -128,6 +138,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(includeCCSMetadata); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_RESPONSE_PARTIAL)) { + out.writeBoolean(isPartial); + } } public boolean includeCCSMetadata() { @@ -188,7 +201,7 @@ public Set clusterAliases() { /** * @param clusterAlias to lookup skip_unavailable from * @return skip_unavailable setting (true/false) - * @throws org.elasticsearch.transport.NoSuchRemoteClusterException if clusterAlias is unknown to this node's RemoteClusterService + * @throws NoSuchRemoteClusterException if clusterAlias is unknown to this node's RemoteClusterService */ public boolean isSkipUnavailable(String clusterAlias) { if (RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY.equals(clusterAlias)) { @@ -279,6 +292,24 @@ public int hashCode() { return Objects.hash(clusterInfo, overallTook); } + public boolean isPartial() { + return isPartial; + } + + /** + * Mark the query as having partial results. + */ + public void markAsPartial() { + isPartial = true; + } + + /** + * Mark this cluster as having partial results. + */ + public void markClusterAsPartial(String clusterAlias) { + swapCluster(clusterAlias, (k, v) -> new Cluster.Builder(v).setStatus(Cluster.Status.PARTIAL).build()); + } + /** * Represents the search metadata about a particular cluster involved in a cross-cluster search. 
* The Cluster object can represent either the local cluster or a remote cluster. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 8530d9b48da08..26b5329589421 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -208,6 +208,7 @@ public Iterator toXContentChunked(ToXContent.Params params if (executionInfo != null && executionInfo.overallTook() != null) { tookTime = ChunkedToXContentHelper.chunk((builder, p) -> { builder.field("took", executionInfo.overallTook().millis()); + builder.field(EsqlExecutionInfo.IS_PARTIAL_FIELD.getPreferredName(), executionInfo.isPartial()); return builder; }); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java new file mode 100644 index 0000000000000..c7477f738e95a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlStopAsyncAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.xpack.core.async.AsyncStopRequest; + +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION; +import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; + +@ServerlessScope(Scope.PUBLIC) +public class RestEsqlStopAsyncAction extends BaseRestHandler { + @Override + public List routes() { + return List.of(new Route(POST, "/_query/async/{id}/stop")); + } + + @Override + public String getName() { + return "esql_async_stop"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + AsyncStopRequest stopReq = new AsyncStopRequest(request.param("id")); + return channel -> client.execute(EsqlAsyncStopAction.INSTANCE, stopReq, new EsqlResponseListener(channel, request)); + } + + @Override + protected Set responseParams() { + return Set.of(URL_PARAM_DELIMITER, DROP_NULL_COLUMNS_OPTION); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java new file mode 100644 index 0000000000000..86f1f6e30108c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunction.java @@ -0,0 +1,219 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; +import org.elasticsearch.xpack.esql.common.Failure; +import org.elasticsearch.xpack.esql.common.Failures; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNull; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; +import static org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.formatIncompatibleTypesMessage; + +/** + * This class contains the common functionalities between the match function ({@link Match}) and match operator ({@link MatchOperator}), + * so the two subclasses just contains the different code + */ +public abstract class AbstractMatchFullTextFunction extends FullTextFunction implements PostOptimizationVerificationAware { + public static final Set FIELD_DATA_TYPES = Set.of( + KEYWORD, + TEXT, + SEMANTIC_TEXT, + BOOLEAN, + DATETIME, + DATE_NANOS, + DOUBLE, + INTEGER, + IP, + LONG, + UNSIGNED_LONG, + VERSION + ); + public static final Set QUERY_DATA_TYPES = Set.of( + KEYWORD, + BOOLEAN, + DATETIME, + DATE_NANOS, + DOUBLE, + INTEGER, + IP, + LONG, + UNSIGNED_LONG, + VERSION + ); + protected final Expression field; + + protected AbstractMatchFullTextFunction( + Source source, + Expression query, + List children, + QueryBuilder queryBuilder, + Expression field + ) { + super(source, query, children, queryBuilder); + this.field = field; + } + + public Expression field() { + return field; + } + + @Override + protected TypeResolution resolveNonQueryParamTypes() { 
+ return isNotNull(field, sourceText(), FIRST).and( + isType( + field, + FIELD_DATA_TYPES::contains, + sourceText(), + FIRST, + "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" + ) + ); + } + + @Override + protected TypeResolution resolveQueryParamType() { + return isType( + query(), + QUERY_DATA_TYPES::contains, + sourceText(), + queryParamOrdinal(), + "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" + ).and(isNotNullAndFoldable(query(), sourceText(), queryParamOrdinal())); + } + + @Override + protected TypeResolution checkParamCompatibility() { + DataType fieldType = field().dataType(); + DataType queryType = query().dataType(); + + // Field and query types should match. If the query is a string, then it can match any field type. + if ((fieldType == queryType) || (queryType == KEYWORD)) { + return TypeResolution.TYPE_RESOLVED; + } + + if (fieldType.isNumeric() && queryType.isNumeric()) { + // When doing an unsigned long query, field must be an unsigned long + if ((queryType == UNSIGNED_LONG && fieldType != UNSIGNED_LONG) == false) { + return TypeResolution.TYPE_RESOLVED; + } + } + + return new TypeResolution(formatIncompatibleTypesMessage(fieldType, queryType, sourceText())); + } + + @Override + public void postOptimizationVerification(Failures failures) { + Expression fieldExpression = field(); + // Field may be converted to other data type (field_name :: data_type), so we need to check the original field + if (fieldExpression instanceof AbstractConvertFunction convertFunction) { + fieldExpression = convertFunction.field(); + } + if (fieldExpression instanceof FieldAttribute == false) { + failures.add( + Failure.fail( + field, + "[{}] {} cannot operate on [{}], which is not a field from an index mapping", + functionName(), + functionType(), + field.sourceText() + ) + ); + } + } + + @Override + public Object queryAsObject() { + Object queryAsObject = 
query().fold(FoldContext.small() /* TODO remove me */); + + // Convert BytesRef to string for string-based values + if (queryAsObject instanceof BytesRef bytesRef) { + return switch (query().dataType()) { + case IP -> EsqlDataTypeConverter.ipToString(bytesRef); + case VERSION -> EsqlDataTypeConverter.versionToString(bytesRef); + default -> bytesRef.utf8ToString(); + }; + } + + // Converts specific types to the correct type for the query + if (query().dataType() == DataType.UNSIGNED_LONG) { + return NumericUtils.unsignedLongAsBigInteger((Long) queryAsObject); + } else if (query().dataType() == DataType.DATETIME && queryAsObject instanceof Long) { + // When casting to date and datetime, we get a long back. But Match query needs a date string + return EsqlDataTypeConverter.dateTimeToString((Long) queryAsObject); + } else if (query().dataType() == DATE_NANOS && queryAsObject instanceof Long) { + return EsqlDataTypeConverter.nanoTimeToString((Long) queryAsObject); + } + + return queryAsObject; + } + + @Override + protected Query translate(TranslatorHandler handler) { + Expression fieldExpression = field; + // Field may be converted to other data type (field_name :: data_type), so we need to check the original field + if (fieldExpression instanceof AbstractConvertFunction convertFunction) { + fieldExpression = convertFunction.field(); + } + if (fieldExpression instanceof FieldAttribute fieldAttribute) { + String fieldName = fieldAttribute.name(); + if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { + // If we have multiple field types, we allow the query to be done, but getting the underlying field name + fieldName = multiTypeEsField.getName(); + } + // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided + return new MatchQuery(source(), fieldName, queryAsObject(), Map.of("lenient", "true")); + } + + throw new IllegalArgumentException("Match must have a field attribute as the first 
argument"); + } + + @Override + public Expression replaceQueryBuilder(QueryBuilder queryBuilder) { + return new Match(source(), field, query(), queryBuilder); + } + + protected ParamOrdinal queryParamOrdinal() { + return SECOND; + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java index a3a14004b1c89..f42433c22e775 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -25,6 +25,7 @@ public static List getNamedWriteables() { entries.add(MultiMatchQueryPredicate.ENTRY); entries.add(QueryString.ENTRY); entries.add(Match.ENTRY); + entries.add(MatchOperator.ENTRY); entries.add(Kql.ENTRY); if (EsqlCapabilities.Cap.TERM_FUNCTION.isEnabled()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index b7ebcda70b622..ea5f3d9b83543 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -7,101 +7,33 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; -import 
org.elasticsearch.xpack.esql.capabilities.PostOptimizationVerificationAware; -import org.elasticsearch.xpack.esql.common.Failure; -import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; -import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.TranslatorHandler; -import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; -import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.io.IOException; import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNull; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; -import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; -import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; -import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; -import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; -import static org.elasticsearch.xpack.esql.core.type.DataType.IP; -import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; -import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.SEMANTIC_TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; -import static org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.formatIncompatibleTypesMessage; /** - * Full text function that performs a {@link QueryStringQuery} . + * Full text function that performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchQuery} . 
*/ -public class Match extends FullTextFunction implements PostOptimizationVerificationAware { +public class Match extends AbstractMatchFullTextFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Match", Match::readFrom); - private final Expression field; - private transient Boolean isOperator; - public static final Set FIELD_DATA_TYPES = Set.of( - KEYWORD, - TEXT, - SEMANTIC_TEXT, - BOOLEAN, - DATETIME, - DATE_NANOS, - DOUBLE, - INTEGER, - IP, - LONG, - UNSIGNED_LONG, - VERSION - ); - public static final Set QUERY_DATA_TYPES = Set.of( - KEYWORD, - BOOLEAN, - DATETIME, - DATE_NANOS, - DOUBLE, - INTEGER, - IP, - LONG, - UNSIGNED_LONG, - VERSION - ); - @FunctionInfo( returnType = "boolean", - operator = ":", preview = true, description = """ Use `MATCH` to perform a <> on the specified field. @@ -132,8 +64,7 @@ public Match( } public Match(Source source, Expression field, Expression matchQuery, QueryBuilder queryBuilder) { - super(source, matchQuery, List.of(field, matchQuery), queryBuilder); - this.field = field; + super(source, matchQuery, List.of(field, matchQuery), queryBuilder, field); } private static Match readFrom(StreamInput in) throws IOException { @@ -162,96 +93,6 @@ public String getWriteableName() { return ENTRY.name; } - @Override - protected TypeResolution resolveNonQueryParamTypes() { - return isNotNull(field, sourceText(), FIRST).and( - isType( - field, - FIELD_DATA_TYPES::contains, - sourceText(), - FIRST, - "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" - ) - ); - } - - @Override - protected TypeResolution resolveQueryParamType() { - return isType( - query(), - QUERY_DATA_TYPES::contains, - sourceText(), - queryParamOrdinal(), - "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version" - ).and(isNotNullAndFoldable(query(), sourceText(), queryParamOrdinal())); - } - - @Override - protected TypeResolution 
checkParamCompatibility() { - DataType fieldType = field().dataType(); - DataType queryType = query().dataType(); - - // Field and query types should match. If the query is a string, then it can match any field type. - if ((fieldType == queryType) || (queryType == KEYWORD)) { - return TypeResolution.TYPE_RESOLVED; - } - - if (fieldType.isNumeric() && queryType.isNumeric()) { - // When doing an unsigned long query, field must be an unsigned long - if ((queryType == UNSIGNED_LONG && fieldType != UNSIGNED_LONG) == false) { - return TypeResolution.TYPE_RESOLVED; - } - } - - return new TypeResolution(formatIncompatibleTypesMessage(fieldType, queryType, sourceText())); - } - - @Override - public void postOptimizationVerification(Failures failures) { - Expression fieldExpression = field(); - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute == false) { - failures.add( - Failure.fail( - field, - "[{}] {} cannot operate on [{}], which is not a field from an index mapping", - functionName(), - functionType(), - field.sourceText() - ) - ); - } - } - - @Override - public Object queryAsObject() { - Object queryAsObject = query().fold(FoldContext.small() /* TODO remove me */); - - // Convert BytesRef to string for string-based values - if (queryAsObject instanceof BytesRef bytesRef) { - return switch (query().dataType()) { - case IP -> EsqlDataTypeConverter.ipToString(bytesRef); - case VERSION -> EsqlDataTypeConverter.versionToString(bytesRef); - default -> bytesRef.utf8ToString(); - }; - } - - // Converts specific types to the correct type for the query - if (query().dataType() == DataType.UNSIGNED_LONG) { - return NumericUtils.unsignedLongAsBigInteger((Long) queryAsObject); - } else if (query().dataType() == DataType.DATETIME && queryAsObject 
instanceof Long) { - // When casting to date and datetime, we get a long back. But Match query needs a date string - return EsqlDataTypeConverter.dateTimeToString((Long) queryAsObject); - } else if (query().dataType() == DATE_NANOS && queryAsObject instanceof Long) { - return EsqlDataTypeConverter.nanoTimeToString((Long) queryAsObject); - } - - return queryAsObject; - } - @Override public Expression replaceChildren(List newChildren) { return new Match(source(), newChildren.get(0), newChildren.get(1), queryBuilder()); @@ -259,56 +100,6 @@ public Expression replaceChildren(List newChildren) { @Override protected NodeInfo info() { - return NodeInfo.create(this, Match::new, field, query(), queryBuilder()); - } - - protected TypeResolutions.ParamOrdinal queryParamOrdinal() { - return SECOND; - } - - public Expression field() { - return field; - } - - @Override - public String functionType() { - return isOperator() ? "operator" : super.functionType(); - } - - @Override - protected Query translate(TranslatorHandler handler) { - Expression fieldExpression = field; - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute fieldAttribute) { - String fieldName = fieldAttribute.name(); - if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { - // If we have multiple field types, we allow the query to be done, but getting the underlying field name - fieldName = multiTypeEsField.getName(); - } - // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided - return new MatchQuery(source(), fieldName, queryAsObject(), Map.of("lenient", "true")); - } - - throw new IllegalArgumentException("Match must have a field attribute as the first argument"); - } - - @Override - public Expression 
replaceQueryBuilder(QueryBuilder queryBuilder) { - return new Match(source(), field, query(), queryBuilder); - } - - @Override - public String functionName() { - return isOperator() ? ":" : super.functionName(); - } - - private boolean isOperator() { - if (isOperator == null) { - isOperator = source().text().toUpperCase(Locale.ROOT).matches("^" + super.functionName() + "\\s*\\(.*\\)") == false; - } - return isOperator; + return NodeInfo.create(this, Match::new, field(), query(), queryBuilder()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java new file mode 100644 index 0000000000000..e3e4bc4678089 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperator.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +/** + * This class performs a {@link org.elasticsearch.xpack.esql.querydsl.query.MatchQuery} using an operator. + */ +public class MatchOperator extends Match { + + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "MatchOperator", + MatchOperator::readFrom + ); + + @FunctionInfo( + returnType = "boolean", + operator = ":", + preview = true, + description = """ + Use the match operator (`:`) to perform a <> on the specified field. + Using `:` is equivalent to using the `match` query in the Elasticsearch Query DSL. + + The match operator is equivalent to the <>. + + For using the function syntax, or adding <>, you can use the + <>. + + `:` returns true if the provided query matches the row.""", + examples = { @Example(file = "match-function", tag = "match-with-field") } + ) + public MatchOperator( + Source source, + @Param( + name = "field", + type = { "keyword", "text", "boolean", "date", "date_nanos", "double", "integer", "ip", "long", "unsigned_long", "version" }, + description = "Field that the query will target." 
+ ) Expression field, + @Param( + name = "query", + type = { "keyword", "boolean", "date", "date_nanos", "double", "integer", "ip", "long", "unsigned_long", "version" }, + description = "Value to find in the provided field." + ) Expression matchQuery + ) { + super(source, field, matchQuery); + } + + private static Match readFrom(StreamInput in) throws IOException { + Source source = Source.readFrom((PlanStreamInput) in); + Expression field = in.readNamedWriteable(Expression.class); + Expression query = in.readNamedWriteable(Expression.class); + + return new MatchOperator(source, field, query); + } + + @Override + public String functionType() { + return "operator"; + } + + @Override + public String functionName() { + return ":"; + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, MatchOperator::new, field(), query()); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new MatchOperator(source(), newChildren.get(0), newChildren.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 236e625f7abe1..04da04e1b3927 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -524,6 +524,9 @@ public Block eval(Page page) { ) { for (int p = 0; p < lhs.getPositionCount(); p++) { if (lhsOrRhs.mask().getBoolean(p)) { + // TODO Copy the per-type specialization that COALESCE has. + // There's also a slowdown because copying from a block checks to see if there are any nulls and that's slow. + // Vectors do not, so this still shows as fairly fast. 
But not as fast as the per-type unrolling. builder.copyFrom(lhs, p, p + 1); } else { builder.copyFrom(rhs, p, p + 1); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index b588832aba4cb..4d843ea7180a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -11,11 +11,13 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -41,8 +43,9 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static 
org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToInt; /** @@ -168,10 +171,14 @@ public DateDiff( @Param(name = "unit", type = { "keyword", "text" }, description = "Time difference unit") Expression unit, @Param( name = "startTimestamp", - type = { "date" }, + type = { "date", "date_nanos" }, description = "A string representing a start timestamp" ) Expression startTimestamp, - @Param(name = "endTimestamp", type = { "date" }, description = "A string representing an end timestamp") Expression endTimestamp + @Param( + name = "endTimestamp", + type = { "date", "date_nanos" }, + description = "A string representing an end timestamp" + ) Expression endTimestamp ) { super(source, List.of(unit, startTimestamp, endTimestamp)); this.unit = unit; @@ -213,33 +220,115 @@ Expression endTimestamp() { return endTimestamp; } - @Evaluator(extraName = "Constant", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) - static int process(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + @Evaluator(extraName = "ConstantMillis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillis(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { ZonedDateTime zdtStart = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestamp), UTC); ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestamp), UTC); return datePartFieldUnit.diff(zdtStart, zdtEnd); } - @Evaluator(warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) - static int process(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { - return process(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + @Evaluator(extraName = "Millis", warnExceptions = { 
IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillis(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + return processMillis(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + } + + @Evaluator(extraName = "ConstantNanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanos(@Fixed Part datePartFieldUnit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(DateUtils.toInstant(startTimestamp), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(DateUtils.toInstant(endTimestamp), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(extraName = "Nanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanos(BytesRef unit, long startTimestamp, long endTimestamp) throws IllegalArgumentException { + return processNanos(Part.resolve(unit.utf8ToString()), startTimestamp, endTimestamp); + } + + @Evaluator(extraName = "ConstantNanosMillis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanosMillis(@Fixed Part datePartFieldUnit, long startTimestampNanos, long endTimestampMillis) + throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(DateUtils.toInstant(startTimestampNanos), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(Instant.ofEpochMilli(endTimestampMillis), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(extraName = "NanosMillis", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processNanosMillis(BytesRef unit, long startTimestampNanos, long endTimestampMillis) throws IllegalArgumentException { + return processNanosMillis(Part.resolve(unit.utf8ToString()), startTimestampNanos, 
endTimestampMillis); + } + + @Evaluator(extraName = "ConstantMillisNanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillisNanos(@Fixed Part datePartFieldUnit, long startTimestampMillis, long endTimestampNanos) + throws IllegalArgumentException { + ZonedDateTime zdtStart = ZonedDateTime.ofInstant(Instant.ofEpochMilli(startTimestampMillis), UTC); + ZonedDateTime zdtEnd = ZonedDateTime.ofInstant(DateUtils.toInstant(endTimestampNanos), UTC); + return datePartFieldUnit.diff(zdtStart, zdtEnd); + } + + @Evaluator(extraName = "MillisNanos", warnExceptions = { IllegalArgumentException.class, InvalidArgumentException.class }) + static int processMillisNanos(BytesRef unit, long startTimestampMillis, long endTimestampNanos) throws IllegalArgumentException { + return processMillisNanos(Part.resolve(unit.utf8ToString()), startTimestampMillis, endTimestampNanos); + } + + @FunctionalInterface + public interface DateDiffFactory { + ExpressionEvaluator.Factory build( + Source source, + ExpressionEvaluator.Factory unitsEvaluator, + ExpressionEvaluator.Factory startTimestampEvaluator, + ExpressionEvaluator.Factory endTimestampEvaluator + ); + } + + @FunctionalInterface + public interface DateDiffConstantFactory { + ExpressionEvaluator.Factory build( + Source source, + Part unitsEvaluator, + ExpressionEvaluator.Factory startTimestampEvaluator, + ExpressionEvaluator.Factory endTimestampEvaluator + ); } @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + if (startTimestamp.dataType() == DATETIME && endTimestamp.dataType() == DATETIME) { + return toEvaluator(toEvaluator, DateDiffConstantMillisEvaluator.Factory::new, DateDiffMillisEvaluator.Factory::new); + } else if (startTimestamp.dataType() == DATE_NANOS && endTimestamp.dataType() == DATE_NANOS) { + return toEvaluator(toEvaluator, DateDiffConstantNanosEvaluator.Factory::new, DateDiffNanosEvaluator.Factory::new); + } else if 
(startTimestamp.dataType() == DATE_NANOS && endTimestamp.dataType() == DATETIME) { + return toEvaluator(toEvaluator, DateDiffConstantNanosMillisEvaluator.Factory::new, DateDiffNanosMillisEvaluator.Factory::new); + } else if (startTimestamp.dataType() == DATETIME && endTimestamp.dataType() == DATE_NANOS) { + return toEvaluator(toEvaluator, DateDiffConstantMillisNanosEvaluator.Factory::new, DateDiffMillisNanosEvaluator.Factory::new); + } + throw new UnsupportedOperationException( + "Invalid types [" + + startTimestamp.dataType() + + ", " + + endTimestamp.dataType() + + "] " + + "If you see this error, there is a bug in DateDiff.resolveType()" + ); + } + + private ExpressionEvaluator.Factory toEvaluator( + ToEvaluator toEvaluator, + DateDiffConstantFactory constantFactory, + DateDiffFactory dateDiffFactory + ) { ExpressionEvaluator.Factory startTimestampEvaluator = toEvaluator.apply(startTimestamp); ExpressionEvaluator.Factory endTimestampEvaluator = toEvaluator.apply(endTimestamp); if (unit.foldable()) { try { Part datePartField = Part.resolve(((BytesRef) unit.fold(toEvaluator.foldCtx())).utf8ToString()); - return new DateDiffConstantEvaluator.Factory(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); + return constantFactory.build(source(), datePartField, startTimestampEvaluator, endTimestampEvaluator); } catch (IllegalArgumentException e) { throw new InvalidArgumentException("invalid unit format for [{}]: {}", sourceText(), e.getMessage()); } } ExpressionEvaluator.Factory unitEvaluator = toEvaluator.apply(unit); - return new DateDiffEvaluator.Factory(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); + return dateDiffFactory.build(source(), unitEvaluator, startTimestampEvaluator, endTimestampEvaluator); } @Override @@ -248,8 +337,10 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - TypeResolution resolution = isString(unit, sourceText(), FIRST).and(isDate(startTimestamp, 
sourceText(), SECOND)) - .and(isDate(endTimestamp, sourceText(), THIRD)); + String operationName = sourceText(); + TypeResolution resolution = isString(unit, sourceText(), FIRST).and( + TypeResolutions.isType(startTimestamp, DataType::isDate, operationName, SECOND, "datetime or date_nanos") + ).and(TypeResolutions.isType(endTimestamp, DataType::isDate, operationName, THIRD, "datetime or date_nanos")); if (resolution.unresolved()) { return resolution; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 52686430ca5b5..611c7a456864a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -11,13 +11,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -31,17 +26,29 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; import 
java.io.IOException; import java.util.List; -import java.util.stream.IntStream; import java.util.stream.Stream; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; +import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_NANOS; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT; +import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; /** - * Function returning the first non-null value. + * Function returning the first non-null value. {@code COALESCE} runs as though + * it were lazily evaluating each position in each incoming {@link Block}. 
*/ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Coalesce", Coalesce::new); @@ -194,70 +201,16 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { - List childEvaluators = children().stream().map(toEvaluator::apply).toList(); - return new ExpressionEvaluator.Factory() { - @Override - public ExpressionEvaluator get(DriverContext context) { - return new CoalesceEvaluator( - context, - PlannerUtils.toElementType(dataType()), - childEvaluators.stream().map(x -> x.get(context)).toList() - ); - } - - @Override - public String toString() { - return "CoalesceEvaluator[values=" + childEvaluators + ']'; - } + return switch (dataType()) { + case BOOLEAN -> CoalesceBooleanEvaluator.toEvaluator(toEvaluator, children()); + case DOUBLE, COUNTER_DOUBLE -> CoalesceDoubleEvaluator.toEvaluator(toEvaluator, children()); + case INTEGER, COUNTER_INTEGER -> CoalesceIntEvaluator.toEvaluator(toEvaluator, children()); + case LONG, DATE_NANOS, DATETIME, COUNTER_LONG, UNSIGNED_LONG -> CoalesceLongEvaluator.toEvaluator(toEvaluator, children()); + case KEYWORD, TEXT, SEMANTIC_TEXT, CARTESIAN_POINT, CARTESIAN_SHAPE, GEO_POINT, GEO_SHAPE, IP, VERSION -> + CoalesceBytesRefEvaluator.toEvaluator(toEvaluator, children()); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; + case UNSUPPORTED, SHORT, BYTE, DATE_PERIOD, OBJECT, DOC_DATA_TYPE, SOURCE, TIME_DURATION, FLOAT, HALF_FLOAT, TSID_DATA_TYPE, + SCALED_FLOAT, PARTIAL_AGG -> throw new UnsupportedOperationException(dataType() + " can't be coalesced"); }; } - - private record CoalesceEvaluator(DriverContext driverContext, ElementType resultType, List evaluators) - implements - EvalOperator.ExpressionEvaluator { - @Override - public Block eval(Page page) { - /* - * We have to evaluate lazily so any errors or warnings that would be - * produced by the 
right hand side are avoided. And so if anything - * on the right hand side is slow we skip it. - * - * And it'd be good if that lazy evaluation were fast. But this - * implementation isn't. It's fairly simple - running position at - * a time - but it's not at all fast. - */ - int positionCount = page.getPositionCount(); - try (Block.Builder result = resultType.newBlockBuilder(positionCount, driverContext.blockFactory())) { - position: for (int p = 0; p < positionCount; p++) { - int[] positions = new int[] { p }; - Page limited = new Page( - 1, - IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) - ); - try (Releasable ignored = limited::releaseBlocks) { - for (EvalOperator.ExpressionEvaluator eval : evaluators) { - try (Block block = eval.eval(limited)) { - if (false == block.isNull(0)) { - result.copyFrom(block, 0, 1); - continue position; - } - } - } - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "CoalesceEvaluator[values=" + evaluators + ']'; - } - - @Override - public void close() { - Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st new file mode 100644 index 0000000000000..33841f03f7803 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/X-CoalesceEvaluator.java.st @@ -0,0 +1,234 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +$endif$ +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.$Type$Block; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; + +import java.util.List; +import java.util.stream.IntStream; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Coalesce}. + * This class is generated. Edit {@code X-CoalesceEvaluator.java.st} instead. + */ +abstract sealed class Coalesce$Type$Evaluator implements EvalOperator.ExpressionEvaluator permits + Coalesce$Type$Evaluator.Coalesce$Type$EagerEvaluator, // + Coalesce$Type$Evaluator.Coalesce$Type$LazyEvaluator { + + static ExpressionEvaluator.Factory toEvaluator(EvaluatorMapper.ToEvaluator toEvaluator, List children) { + List childEvaluators = children.stream().map(toEvaluator::apply).toList(); + if (childEvaluators.stream().allMatch(ExpressionEvaluator.Factory::eagerEvalSafeInLazy)) { + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Coalesce$Type$EagerEvaluator(context, childEvaluators.stream().map(x -> x.get(context)).toList()); + } + + @Override + public String toString() { + return "Coalesce$Type$EagerEvaluator[values=" + childEvaluators + ']'; + } + }; + } + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Coalesce$Type$LazyEvaluator(context, childEvaluators.stream().map(x -> 
x.get(context)).toList()); + } + + @Override + public String toString() { + return "Coalesce$Type$LazyEvaluator[values=" + childEvaluators + ']'; + } + }; + } + + protected final DriverContext driverContext; + protected final List evaluators; + + protected Coalesce$Type$Evaluator(DriverContext driverContext, List evaluators) { + this.driverContext = driverContext; + this.evaluators = evaluators; + } + + @Override + public final $Type$Block eval(Page page) { + return entireBlock(page); + } + + /** + * Evaluate COALESCE for an entire {@link Block} for as long as we can, then shift to + * {@link #perPosition} evaluation. + *

+ * Entire Block evaluation is the "normal" way to run the compute engine, + * just calling {@link EvalOperator.ExpressionEvaluator#eval}. It's much faster so we try + * that first. For each evaluator, we {@linkplain EvalOperator.ExpressionEvaluator#eval} and: + *

+ *
    + *
  • If the {@linkplain Block} doesn't have any nulls we return it. COALESCE done.
  • + *
  • If the {@linkplain Block} is only nulls we skip it and try the next evaluator.
  • + *
  • If this is the last evaluator we just return it. COALESCE done.
  • + *
  • + * Otherwise, the {@linkplain Block} has mixed nulls and non-nulls so we drop + * into a per position evaluator. + *
  • + *
+ */ + private $Type$Block entireBlock(Page page) { + int lastFullBlockIdx = 0; + while (true) { + $Type$Block lastFullBlock = ($Type$Block) evaluators.get(lastFullBlockIdx++).eval(page); + if (lastFullBlockIdx == evaluators.size() || lastFullBlock.asVector() != null) { + return lastFullBlock; + } + if (lastFullBlock.areAllValuesNull()) { + // Result is all nulls and isn't the last result so we don't need any of it. + lastFullBlock.close(); + continue; + } + // The result has some nulls and some non-nulls. + return perPosition(page, lastFullBlock, lastFullBlockIdx); + } + } + + /** + * Evaluate each position of the incoming {@link Page} for COALESCE + * independently. Our attempt to evaluate entire blocks has yielded + * a block that contains some nulls and some non-nulls and we have + * to fill in the nulls with the results of calling the remaining + * evaluators. + *

+ * This must not return warnings caused by + * evaluating positions for which a previous evaluator returned + * non-null. These are positions that, at least from the perspective + * of a compute engine user, don't have to be + * evaluated. Put another way, this must function as though + * {@code COALESCE} were per-position lazy. It can manage that + * any way it likes. + *

+ */ + protected abstract $Type$Block perPosition(Page page, $Type$Block lastFullBlock, int firstToEvaluate); + + @Override + public final String toString() { + return getClass().getSimpleName() + "[values=" + evaluators + ']'; + } + + @Override + public final void close() { + Releasables.closeExpectNoException(() -> Releasables.close(evaluators)); + } + + /** + * Evaluates {@code COALESCE} eagerly per position if entire-block evaluation fails. + * First we evaluate all remaining evaluators, and then we pluck the first non-null + * value from each one. This is much faster than + * {@link Coalesce$Type$LazyEvaluator} but will include spurious warnings if any of the + * evaluators make them so we only use it for evaluators that are + * {@link Factory#eagerEvalSafeInLazy safe} to evaluate eagerly + * in a lazy environment. + */ + static final class Coalesce$Type$EagerEvaluator extends Coalesce$Type$Evaluator { + Coalesce$Type$EagerEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected $Type$Block perPosition(Page page, $Type$Block lastFullBlock, int firstToEvaluate) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int positionCount = page.getPositionCount(); + $Type$Block[] flatten = new $Type$Block[evaluators.size() - firstToEvaluate + 1]; + try { + flatten[0] = lastFullBlock; + for (int f = 1; f < flatten.length; f++) { + flatten[f] = ($Type$Block) evaluators.get(firstToEvaluate + f - 1).eval(page); + } + try ($Type$Block.Builder result = driverContext.blockFactory().new$Type$BlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + for ($Type$Block f : flatten) { + if (false == f.isNull(p)) { + result.copyFrom(f, p$if(BytesRef)$, scratch$endif$); + continue position; + } + } + result.appendNull(); + } + return result.build(); + } + } finally { + Releasables.close(flatten); + } + } + } + + /** + * Evaluates {@code COALESCE} lazily per position if 
entire-block evaluation fails. + * For each position we either: + *
    + *
  • Take the non-null values from the {@code lastFullBlock}
  • + *
  • + * Evaluate the remaining evaluators one at a time, keeping + * the first non-null value. + *
  • + *
+ */ + static final class Coalesce$Type$LazyEvaluator extends Coalesce$Type$Evaluator { + Coalesce$Type$LazyEvaluator(DriverContext driverContext, List evaluators) { + super(driverContext, evaluators); + } + + @Override + protected $Type$Block perPosition(Page page, $Type$Block lastFullBlock, int firstToEvaluate) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int positionCount = page.getPositionCount(); + try ($Type$Block.Builder result = driverContext.blockFactory().new$Type$BlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (lastFullBlock.isNull(p) == false) { + result.copyFrom(lastFullBlock, p, p + 1); + continue; + } + int[] positions = new int[] { p }; + Page limited = new Page( + 1, + IntStream.range(0, page.getBlockCount()).mapToObj(b -> page.getBlock(b).filter(positions)).toArray(Block[]::new) + ); + try (Releasable ignored = limited::releaseBlocks) { + for (int e = firstToEvaluate; e < evaluators.size(); e++) { + try ($Type$Block block = ($Type$Block) evaluators.get(e).eval(limited)) { + if (false == block.isNull(0)) { + result.copyFrom(block, 0$if(BytesRef)$, scratch$endif$); + continue position; + } + } + } + result.appendNull(); + } + } + return result.build(); + } finally { + lastFullBlock.close(); + } + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index 688cbbb992443..76facd2631001 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -58,7 +58,7 @@ Matching special characters (eg. `.`, `*`, `(`...) will require escaping. 
---- include::{esql-specs}/string.csv-spec[tag=rlikeEscapingTripleQuotes] ---- - """, examples = @Example(file = "docs", tag = "rlike")) + """, operator = "RLIKE", examples = @Example(file = "docs", tag = "rlike")) public RLike( Source source, @Param(name = "str", type = { "keyword", "text" }, description = "A literal value.") Expression value, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index 8c596ee032bee..da0c0e47fbc52 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -69,7 +69,7 @@ also act on a constant (literal) expression. The right-hand side of the operator ---- include::{esql-specs}/string.csv-spec[tag=likeEscapingTripleQuotes] ---- - """, examples = @Example(file = "docs", tag = "like")) + """, operator = "LIKE", examples = @Example(file = "docs", tag = "like")) public WildcardLike( Source source, @Param(name = "str", type = { "keyword", "text" }, description = "A literal expression.") Expression left, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 283e305d79270..114fcda1e634a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -41,7 +41,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.FilteredExpression; -import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.MatchOperator; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.logical.And; @@ -988,6 +988,6 @@ public Expression visitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpress matchFieldExpression = expression(ctx.fieldExp); } - return new Match(source(ctx), matchFieldExpression, expression(ctx.matchQuery)); + return new MatchOperator(source(ctx), matchFieldExpression, expression(ctx.matchQuery)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7d99cf5988597..75619958c5228 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -55,9 +55,9 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -83,6 +83,7 @@ public class ComputeService { private final AtomicLong childSessionIdGenerator = new AtomicLong(); private final DataNodeComputeHandler dataNodeComputeHandler; private final ClusterComputeHandler clusterComputeHandler; + private final ExchangeService exchangeService; @SuppressWarnings("this-escape") public ComputeService( 
@@ -113,6 +114,7 @@ public ComputeService( esqlExecutor, dataNodeComputeHandler ); + this.exchangeService = exchangeService; } public void execute( @@ -195,11 +197,12 @@ public void execute( var exchangeSource = new ExchangeSourceHandler( queryPragmas.exchangeBufferSize(), transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), - computeListener.acquireAvoid() + ActionListener.runBefore(computeListener.acquireAvoid(), () -> exchangeService.removeExchangeSourceHandler(sessionId)) ); + exchangeService.addExchangeSourceHandler(sessionId, exchangeSource); try (Releasable ignored = exchangeSource.addEmptySink()) { // run compute on the coordinator - final AtomicReference localResponse = new AtomicReference<>(new ComputeResponse(List.of())); + final AtomicBoolean localClusterWasInterrupted = new AtomicBoolean(); try ( var localListener = new ComputeListener( transportService.getThreadPool(), @@ -207,16 +210,13 @@ public void execute( computeListener.acquireCompute().delegateFailure((l, profiles) -> { if (execInfo.isCrossClusterSearch() && execInfo.clusterAliases().contains(LOCAL_CLUSTER)) { var tookTime = TimeValue.timeValueNanos(System.nanoTime() - execInfo.getRelativeStartNanos()); - var r = localResponse.get(); - var merged = new ComputeResponse( - profiles, - tookTime, - r.totalShards, - r.successfulShards, - r.skippedShards, - r.failedShards + var status = localClusterWasInterrupted.get() + ? 
EsqlExecutionInfo.Cluster.Status.PARTIAL + : EsqlExecutionInfo.Cluster.Status.SUCCESSFUL; + execInfo.swapCluster( + LOCAL_CLUSTER, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(status).setTook(tookTime).build() ); - updateExecutionInfo(execInfo, LOCAL_CLUSTER, merged); } l.onResponse(profiles); }) @@ -241,7 +241,17 @@ public void execute( exchangeSource, cancelQueryOnFailure, localListener.acquireCompute().map(r -> { - localResponse.set(r); + localClusterWasInterrupted.set(execInfo.isPartial()); + if (execInfo.isCrossClusterSearch() && execInfo.clusterAliases().contains(LOCAL_CLUSTER)) { + execInfo.swapCluster( + LOCAL_CLUSTER, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(r.getTotalShards()) + .setSuccessfulShards(r.getSuccessfulShards()) + .setSkippedShards(r.getSkippedShards()) + .setFailedShards(r.getFailedShards()) + .build() + ); + } return r.getProfiles(); }) ); @@ -269,22 +279,19 @@ public void execute( } private void updateExecutionInfo(EsqlExecutionInfo executionInfo, String clusterAlias, ComputeResponse resp) { - TimeValue tookOnCluster; - if (resp.getTook() != null) { - TimeValue remoteExecutionTime = resp.getTook(); - final long planningTime; - if (clusterAlias.equals(LOCAL_CLUSTER)) { - planningTime = 0L; + Function runningToSuccess = status -> { + if (status == EsqlExecutionInfo.Cluster.Status.RUNNING) { + return executionInfo.isPartial() ? 
EsqlExecutionInfo.Cluster.Status.PARTIAL : EsqlExecutionInfo.Cluster.Status.SUCCESSFUL; } else { - planningTime = executionInfo.planningTookTime().nanos(); + return status; } - tookOnCluster = new TimeValue(planningTime + remoteExecutionTime.nanos(), TimeUnit.NANOSECONDS); + }; + if (resp.getTook() != null) { + var tookTime = TimeValue.timeValueNanos(executionInfo.planningTookTime().nanos() + resp.getTook().nanos()); executionInfo.swapCluster( clusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) - // for now ESQL doesn't return partial results, so set status to SUCCESSFUL - .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) - .setTook(tookOnCluster) + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(runningToSuccess.apply(v.getStatus())) + .setTook(tookTime) .setTotalShards(resp.getTotalShards()) .setSuccessfulShards(resp.getSuccessfulShards()) .setSkippedShards(resp.getSkippedShards()) @@ -294,14 +301,11 @@ private void updateExecutionInfo(EsqlExecutionInfo executionInfo, String cluster } else { // if the cluster is an older version and does not send back took time, then calculate it here on the coordinator // and leave shard info unset, so it is not shown in the CCS metadata section of the JSON response - long remoteTook = System.nanoTime() - executionInfo.getRelativeStartNanos(); - tookOnCluster = new TimeValue(remoteTook, TimeUnit.NANOSECONDS); + var tookTime = TimeValue.timeValueNanos(System.nanoTime() - executionInfo.getRelativeStartNanos()); executionInfo.swapCluster( clusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v) - // for now ESQL doesn't return partial results, so set status to SUCCESSFUL - .setStatus(EsqlExecutionInfo.Cluster.Status.SUCCESSFUL) - .setTook(tookOnCluster) + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setStatus(runningToSuccess.apply(v.getStatus())) + .setTook(tookTime) .build() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index b79dda900f39c..4379e2e8041ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.esql.EsqlInfoTransportAction; import org.elasticsearch.xpack.esql.EsqlUsageTransportAction; import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction; +import org.elasticsearch.xpack.esql.action.EsqlAsyncStopAction; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.action.EsqlQueryRequestBuilder; import org.elasticsearch.xpack.esql.action.EsqlResolveFieldsAction; @@ -60,6 +61,7 @@ import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; +import org.elasticsearch.xpack.esql.action.RestEsqlStopAsyncAction; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; import org.elasticsearch.xpack.esql.execution.PlanExecutor; @@ -151,7 +153,8 @@ public List> getSettings() { new ActionHandler<>(XPackUsageFeatureAction.ESQL, EsqlUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.ESQL, EsqlInfoTransportAction.class), new ActionHandler<>(EsqlResolveFieldsAction.TYPE, EsqlResolveFieldsAction.class), - new ActionHandler<>(EsqlSearchShardsAction.TYPE, EsqlSearchShardsAction.class) + new ActionHandler<>(EsqlSearchShardsAction.TYPE, EsqlSearchShardsAction.class), + new ActionHandler<>(EsqlAsyncStopAction.INSTANCE, TransportEsqlAsyncStopAction.class) ); } @@ -171,6 +174,7 @@ public List getRestHandlers( new RestEsqlQueryAction(), new RestEsqlAsyncQueryAction(), new RestEsqlGetAsyncResultAction(), + new 
RestEsqlStopAsyncAction(), new RestEsqlDeleteAsyncResultAction() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java index 4bcebcfe64cb9..5658db0599186 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java @@ -75,7 +75,7 @@ public Writeable.Reader responseReader() { /** * Unwraps the exception in the case of failure. This keeps the exception types - * the same as the sync API, namely ParsingException and ParsingException. + * the same as the sync API, namely ParsingException and VerificationException. */ static ActionListener unwrapListener(ActionListener listener) { return new ActionListener<>() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java new file mode 100644 index 0000000000000..a4007a520ed30 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.compute.EsqlRefCountingListener; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.async.AsyncExecutionId; +import org.elasticsearch.xpack.core.async.AsyncSearchSecurity; +import org.elasticsearch.xpack.core.async.AsyncStopRequest; +import org.elasticsearch.xpack.core.async.AsyncTaskIndexService; +import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; +import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.esql.action.EsqlAsyncStopAction; +import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; +import org.elasticsearch.xpack.esql.action.EsqlQueryTask; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; + +/** + * This action will stop running async request and collect the results. + * If the request is already finished, it will do the same thing as the regular async get. 
+ */ +public class TransportEsqlAsyncStopAction extends HandledTransportAction { + + private final TransportEsqlQueryAction queryAction; + private final TransportEsqlAsyncGetResultsAction getResultsAction; + private final ExchangeService exchangeService; + private final BlockFactory blockFactory; + private final ClusterService clusterService; + private final TransportService transportService; + private final AsyncSearchSecurity security; + + @Inject + public TransportEsqlAsyncStopAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + TransportEsqlQueryAction queryAction, + TransportEsqlAsyncGetResultsAction getResultsAction, + Client client, + ExchangeService exchangeService, + BlockFactory blockFactory + ) { + super(EsqlAsyncStopAction.NAME, transportService, actionFilters, AsyncStopRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + this.queryAction = queryAction; + this.getResultsAction = getResultsAction; + this.exchangeService = exchangeService; + this.blockFactory = blockFactory; + this.transportService = transportService; + this.clusterService = clusterService; + this.security = new AsyncSearchSecurity( + XPackPlugin.ASYNC_RESULTS_INDEX, + new SecurityContext(clusterService.getSettings(), client.threadPool().getThreadContext()), + client, + ASYNC_SEARCH_ORIGIN + ); + } + + @Override + protected void doExecute(Task task, AsyncStopRequest request, ActionListener listener) { + AsyncExecutionId searchId = AsyncExecutionId.decode(request.getId()); + DiscoveryNode node = clusterService.state().nodes().get(searchId.getTaskId().getNodeId()); + if (clusterService.localNode().getId().equals(searchId.getTaskId().getNodeId()) || node == null) { + stopQueryAndReturnResult(task, searchId, listener); + } else { + transportService.sendRequest( + node, + EsqlAsyncStopAction.NAME, + request, + new ActionListenerResponseHandler<>(listener, EsqlQueryResponse.reader(blockFactory), EsExecutors.DIRECT_EXECUTOR_SERVICE) + ); 
+ } + } + + /** + * Returns the ID for stored compute session. See {@link TransportEsqlQueryAction#sessionID(Task)} + */ + private String sessionID(AsyncExecutionId asyncId) { + return new TaskId(clusterService.localNode().getId(), asyncId.getTaskId().getId()).toString(); + } + + private void stopQueryAndReturnResult(Task task, AsyncExecutionId asyncId, ActionListener listener) { + String asyncIdStr = asyncId.getEncoded(); + TransportEsqlQueryAction.EsqlQueryListener asyncListener = queryAction.getAsyncListener(asyncIdStr); + if (asyncListener == null) { + // This should mean one of the two things: either bad request ID, or the query has already finished + // In both cases, let regular async get deal with it. + var getAsyncResultRequest = new GetAsyncResultRequest(asyncIdStr); + // TODO: this should not be happening, but if the listener is not registered and the query is not finished, + // we give it some time to finish + getAsyncResultRequest.setWaitForCompletionTimeout(new TimeValue(1, TimeUnit.SECONDS)); + getResultsAction.execute(task, getAsyncResultRequest, listener); + return; + } + try { + EsqlQueryTask asyncTask = AsyncTaskIndexService.getTask(taskManager, asyncId, EsqlQueryTask.class); + if (false == security.currentUserHasAccessToTask(asyncTask)) { + throw new ResourceNotFoundException(asyncId + " not found"); + } + } catch (IOException e) { + throw new ResourceNotFoundException(asyncId + " not found", e); + } + // Here we will wait for both the response to become available and for the finish operation to complete + var responseHolder = new AtomicReference(); + try (var refs = new EsqlRefCountingListener(listener.map(unused -> responseHolder.get()))) { + asyncListener.addListener(refs.acquire().map(r -> { + responseHolder.set(r); + return null; + })); + asyncListener.markAsPartial(); + exchangeService.finishSessionEarly(sessionID(asyncId), refs.acquire()); + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index d83239545c383..a32b4591943f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -13,12 +13,14 @@ import org.elasticsearch.action.admin.cluster.stats.CCSUsageTelemetry; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; @@ -81,6 +83,8 @@ public class TransportEsqlQueryAction extends HandledTransportAction asyncListeners = ConcurrentCollections.newConcurrentMap(); @Inject @SuppressWarnings("this-escape") @@ -179,11 +183,41 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener } } + // Subscribable listener that can keep track of the EsqlExecutionInfo + // Used to mark an async query as partial if it is stopped + public static class EsqlQueryListener extends SubscribableListener { + private EsqlExecutionInfo executionInfo; + + public EsqlQueryListener(EsqlExecutionInfo executionInfo) { + this.executionInfo = executionInfo; + } + + public 
EsqlExecutionInfo getExecutionInfo() { + return executionInfo; + } + + public void markAsPartial() { + if (executionInfo != null) { + executionInfo.markAsPartial(); + } + } + } + @Override public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener listener) { // set EsqlExecutionInfo on async-search task so that it is accessible to GET _query/async while the query is still running task.setExecutionInfo(createEsqlExecutionInfo(request)); - ActionListener.run(listener, l -> innerExecute(task, request, l)); + // Since the request is async here, we need to wrap the listener in a SubscribableListener so that we can collect the results from + // other endpoints, such as _query/async/stop + EsqlQueryListener subListener = new EsqlQueryListener(task.executionInfo()); + String asyncExecutionId = task.getExecutionId().getEncoded(); + subListener.addListener(ActionListener.runAfter(listener, () -> asyncListeners.remove(asyncExecutionId))); + asyncListeners.put(asyncExecutionId, subListener); + ActionListener.run(subListener, l -> innerExecute(task, request, l)); + } + + public EsqlQueryListener getAsyncListener(String executionId) { + return asyncListeners.get(executionId); } private void innerExecute(Task task, EsqlQueryRequest request, ActionListener listener) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index f4c68f141460b..5743c7c6ec57f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -518,30 +518,66 @@ static EsqlQueryResponse fromXContent(XContentParser parser) { } } + public static int clusterDetailsSize(int numClusters) { + /* Example: + "_clusters" : { + "total" : 2, + "successful" : 2, + "running" : 0, + "skipped" : 
0, + "partial" : 0, + "failed" : 0, + "details" : { + "(local)" : { + "status" : "successful", + "indices" : "logs-1", + "took" : 4444, + "_shards" : { + "total" : 10, + "successful" : 10, + "skipped" : 3, + "failed" : 0 + } + }, + "remote1" : { + "status" : "successful", + "indices" : "remote1:logs-1", + "took" : 4999, + "_shards" : { + "total" : 12, + "successful" : 12, + "skipped" : 5, + "failed" : 0 + } + } + } + } + */ + return numClusters * 4 + 6; + } + public void testChunkResponseSizeColumnar() { - int sizeClusterDetails = 14; try (EsqlQueryResponse resp = randomResponse(true, null)) { int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; - assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize); + assertChunkCount(resp, r -> 5 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize); } try (EsqlQueryResponse resp = randomResponseAsync(true, null, true)) { int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; - assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize); // is_running + assertChunkCount(resp, r -> 6 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize); // is_running } } public void testChunkResponseSizeRows() { - int sizeClusterDetails = 14; try (EsqlQueryResponse resp = randomResponse(false, null)) { int bodySize = resp.pages().stream().mapToInt(Page::getPositionCount).sum(); - assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize); + assertChunkCount(resp, r -> 5 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize); } try (EsqlQueryResponse resp = randomResponseAsync(false, null, true)) { int bodySize = resp.pages().stream().mapToInt(Page::getPositionCount).sum(); - assertChunkCount(resp, r -> 6 + sizeClusterDetails + 
bodySize); + assertChunkCount(resp, r -> 6 + clusterDetailsSize(resp.getExecutionInfo().clusterInfo.size()) + bodySize); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 67dec69b51393..e08411240b841 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -44,7 +44,7 @@ import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.fulltext.MatchOperator; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; @@ -139,7 +139,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { entry("is_null", IsNull.class), entry("is_not_null", IsNotNull.class), // Match operator is both a function and an operator - entry("match_operator", Match.class) + entry("match_operator", MatchOperator.class) ); private static EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry().snapshotRegistry(); @@ -823,12 +823,13 @@ public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { return; } - String rendered = buildSignatureSvg(functionName()); + String name = functionName(); + String rendered = buildSignatureSvg(name); if (rendered == null) { LogManager.getLogger(getTestClass()).info("Skipping 
rendering signature because the function isn't registered"); } else { LogManager.getLogger(getTestClass()).info("Writing function signature"); - writeToTempDir("signature", rendered, "svg"); + writeToTempDir("signature", name, "svg", rendered); } } @@ -890,10 +891,13 @@ private static Map, DataType> signatures() { @AfterClass public static void renderDocs() throws IOException { + renderDocs(functionName()); + } + + protected static void renderDocs(String name) throws IOException { if (System.getProperty("generateDocs") == null) { return; } - String name = functionName(); if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) { renderDocsForOperators(name); return; @@ -922,12 +926,12 @@ public static void renderDocs() throws IOException { description.isAggregation() ); } - renderTypes(description.args()); - renderParametersList(description.argNames(), description.argDescriptions()); + renderTypes(name, description.args()); + renderParametersList(name, description.argNames(), description.argDescriptions()); FunctionInfo info = EsqlFunctionRegistry.functionInfo(definition); - renderDescription(description.description(), info.detailedDescription(), info.note()); - boolean hasExamples = renderExamples(info); - boolean hasAppendix = renderAppendix(info.appendix()); + renderDescription(name, description.description(), info.detailedDescription(), info.note()); + boolean hasExamples = renderExamples(name, info); + boolean hasAppendix = renderAppendix(name, info.appendix()); renderFullLayout(name, info.preview(), hasExamples, hasAppendix); renderKibanaInlineDocs(name, info); renderKibanaFunctionDefinition(name, info, description.args(), description.variadic()); @@ -944,7 +948,7 @@ public static void renderDocs() throws IOException { + "may be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview " + "are not subject to the support SLA of official GA features.\"]\n"; - private static void renderTypes(List args) throws IOException { + private static void renderTypes(String name, List args) throws IOException { StringBuilder header = new StringBuilder(); List argNames = args.stream().map(EsqlFunctionRegistry.ArgSignature::name).toList(); for (String arg : argNames) { @@ -984,11 +988,11 @@ private static void renderTypes(List args) th [%header.monospaced.styled,format=dsv,separator=|] |=== """ + header + "\n" + table.stream().collect(Collectors.joining("\n")) + "\n|===\n"; - LogManager.getLogger(getTestClass()).info("Writing function types for [{}]:\n{}", functionName(), rendered); - writeToTempDir("types", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing function types for [{}]:\n{}", name, rendered); + writeToTempDir("types", name, "asciidoc", rendered); } - private static void renderParametersList(List argNames, List argDescriptions) throws IOException { + private static void renderParametersList(String name, List argNames, List argDescriptions) throws IOException { StringBuilder builder = new StringBuilder(); builder.append(DOCS_WARNING); builder.append("*Parameters*\n"); @@ -996,11 +1000,11 @@ private static void renderParametersList(List argNames, List arg builder.append("\n`").append(argNames.get(a)).append("`::\n").append(argDescriptions.get(a)).append('\n'); } String rendered = builder.toString(); - LogManager.getLogger(getTestClass()).info("Writing parameters for [{}]:\n{}", functionName(), rendered); - writeToTempDir("parameters", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing parameters for [{}]:\n{}", name, rendered); + writeToTempDir("parameters", name, "asciidoc", rendered); } - private static void renderDescription(String description, String detailedDescription, String note) throws IOException { + private static void 
renderDescription(String name, String description, String detailedDescription, String note) throws IOException { String rendered = DOCS_WARNING + """ *Description* @@ -1013,11 +1017,11 @@ private static void renderDescription(String description, String detailedDescrip if (Strings.isNullOrEmpty(note) == false) { rendered += "\nNOTE: " + note + "\n"; } - LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", functionName(), rendered); - writeToTempDir("description", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", name, rendered); + writeToTempDir("description", name, "asciidoc", rendered); } - private static boolean renderExamples(FunctionInfo info) throws IOException { + private static boolean renderExamples(String name, FunctionInfo info) throws IOException { if (info == null || info.examples().length == 0) { return false; } @@ -1051,20 +1055,20 @@ private static boolean renderExamples(FunctionInfo info) throws IOException { } builder.append('\n'); String rendered = builder.toString(); - LogManager.getLogger(getTestClass()).info("Writing examples for [{}]:\n{}", functionName(), rendered); - writeToTempDir("examples", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing examples for [{}]:\n{}", name, rendered); + writeToTempDir("examples", name, "asciidoc", rendered); return true; } - private static boolean renderAppendix(String appendix) throws IOException { + private static boolean renderAppendix(String name, String appendix) throws IOException { if (appendix.isEmpty()) { return false; } String rendered = DOCS_WARNING + appendix + "\n"; - LogManager.getLogger(getTestClass()).info("Writing appendix for [{}]:\n{}", functionName(), rendered); - writeToTempDir("appendix", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing appendix for [{}]:\n{}", name, rendered); + writeToTempDir("appendix", name, "asciidoc", rendered); return true; } @@ 
-1091,11 +1095,11 @@ private static void renderFullLayout(String name, boolean preview, boolean hasEx if (hasAppendix) { rendered += "include::../appendix/" + name + ".asciidoc[]\n"; } - LogManager.getLogger(getTestClass()).info("Writing layout for [{}]:\n{}", functionName(), rendered); - writeToTempDir("layout", rendered, "asciidoc"); + LogManager.getLogger(getTestClass()).info("Writing layout for [{}]:\n{}", name, rendered); + writeToTempDir("layout", name, "asciidoc", rendered); } - private static Constructor constructorWithFunctionInfo(Class clazz) { + protected static Constructor constructorWithFunctionInfo(Class clazz) { for (Constructor ctor : clazz.getConstructors()) { FunctionInfo functionInfo = ctor.getAnnotation(FunctionInfo.class); if (functionInfo != null) { @@ -1110,6 +1114,10 @@ private static void renderDocsForOperators(String name) throws IOException { assert ctor != null; FunctionInfo functionInfo = ctor.getAnnotation(FunctionInfo.class); assert functionInfo != null; + renderDocsForOperators(name, ctor, functionInfo); + } + + protected static void renderDocsForOperators(String name, Constructor ctor, FunctionInfo functionInfo) throws IOException { renderKibanaInlineDocs(name, functionInfo); var params = ctor.getParameters(); @@ -1127,7 +1135,7 @@ private static void renderDocsForOperators(String name) throws IOException { } } renderKibanaFunctionDefinition(name, functionInfo, args, likeOrInOperator(name)); - renderTypes(args); + renderTypes(name, args); } private static void renderKibanaInlineDocs(String name, FunctionInfo info) throws IOException { @@ -1151,8 +1159,8 @@ private static void renderKibanaInlineDocs(String name, FunctionInfo info) throw builder.append("Note: ").append(removeAsciidocLinks(info.note())).append("\n"); } String rendered = builder.toString(); - LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", functionName(), rendered); - writeToTempDir("kibana/docs", rendered, "md"); + 
LogManager.getLogger(getTestClass()).info("Writing kibana inline docs for [{}]:\n{}", name, rendered); + writeToTempDir("kibana/docs", name, "md", rendered); } private static void renderKibanaFunctionDefinition( @@ -1244,8 +1252,8 @@ private static void renderKibanaFunctionDefinition( builder.field("snapshot_only", EsqlFunctionRegistry.isSnapshotOnly(name)); String rendered = Strings.toString(builder.endObject()); - LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", functionName(), rendered); - writeToTempDir("kibana/definition", rendered, "json"); + LogManager.getLogger(getTestClass()).info("Writing kibana function definition for [{}]:\n{}", name, rendered); + writeToTempDir("kibana/definition", name, "json", rendered); } private static String removeAsciidocLinks(String asciidoc) { @@ -1340,7 +1348,10 @@ private static String unaryOperator(String name) { * If this tests is for a like or rlike operator return true, otherwise return {@code null}. */ private static boolean likeOrInOperator(String name) { - return name.equalsIgnoreCase("rlike") || name.equalsIgnoreCase("like") || name.equalsIgnoreCase("in"); + return switch (name.toLowerCase(Locale.ENGLISH)) { + case "rlike", "like", "in", "not_rlike", "not_like", "not_in" -> true; + default -> false; + }; } /** @@ -1350,11 +1361,11 @@ private static boolean likeOrInOperator(String name) { * don't have write permission to the docs. *

*/ - private static void writeToTempDir(String subdir, String str, String extension) throws IOException { + private static void writeToTempDir(String subdir, String name, String extension, String str) throws IOException { // We have to write to a tempdir because it's all test are allowed to write to. Gradle can move them. Path dir = PathUtils.get(System.getProperty("java.io.tmpdir")).resolve("esql").resolve("functions").resolve(subdir); Files.createDirectories(dir); - Path file = dir.resolve(functionName() + "." + extension); + Path file = dir.resolve(name + "." + extension); Files.writeString(file, str); LogManager.getLogger(getTestClass()).info("Wrote to file: {}", file); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java new file mode 100644 index 0000000000000..0a80da9c60625 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/AbstractMatchFullTextFunctionTests.java @@ -0,0 +1,365 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.stringCases; +import static org.hamcrest.Matchers.equalTo; + +public abstract class AbstractMatchFullTextFunctionTests extends AbstractFunctionTestCase { + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + + AbstractMatchFullTextFunctionTests.addUnsignedLongCases(suppliers); + AbstractMatchFullTextFunctionTests.addNumericCases(suppliers); + AbstractMatchFullTextFunctionTests.addNonNumericCases(suppliers); + AbstractMatchFullTextFunctionTests.addQueryAsStringTestCases(suppliers); + AbstractMatchFullTextFunctionTests.addStringTestCases(suppliers); + + return parameterSuppliersFromTypedData(suppliers); + } + + private static void addNonNumericCases(List suppliers) { + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.booleanCases(), + TestCaseSupplier.booleanCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.ipCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.versionCases(""), + 
TestCaseSupplier.versionCases(""), + List.of(), + false + ) + ); + // Datetime + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.dateCases(), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.dateNanosCases(), + TestCaseSupplier.dateNanosCases(), + List.of(), + false + ) + ); + } + + private static void addNumericCases(List suppliers) { + suppliers.addAll( + TestCaseSupplier.forBinaryComparisonWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs<>( + new TestCaseSupplier.NumericTypeTestConfig<>( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> true, + "EqualsIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> true, + "EqualsLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig<>( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + // NB: this has different behavior than Double::equals + (l, r) -> true, + "EqualsDoublesEvaluator" + ) + ), + "field", + "query", + (lhs, rhs) -> List.of(), + false + ) + ); + } + + private static void addUnsignedLongCases(List suppliers) { + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + 
DataType.BOOLEAN, + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.longCases(Long.MIN_VALUE, Long.MAX_VALUE, true), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.doubleCases(Double.MIN_VALUE, Double.MAX_VALUE, true), + List.of(), + false + ) + ); + } + + private static void addQueryAsStringTestCases(List suppliers) { + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.longCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.doubleCases(Double.MIN_VALUE, Double.MAX_VALUE, true), + 
TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.booleanCases(), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.ipCases(), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.versionCases(""), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + // Datetime + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.dateCases(), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + null, + "field", + "query", + Object::equals, + DataType.BOOLEAN, + TestCaseSupplier.dateNanosCases(), + TestCaseSupplier.stringCases(DataType.KEYWORD), + List.of(), + false + ) + ); + } + + private static void addStringTestCases(List suppliers) { + for (DataType fieldType : DataType.stringTypes()) { + if (DataType.UNDER_CONSTRUCTION.containsKey(fieldType)) { + continue; + } + for 
(TestCaseSupplier.TypedDataSupplier queryDataSupplier : stringCases(fieldType)) { + suppliers.add( + TestCaseSupplier.testCaseSupplier( + queryDataSupplier, + new TestCaseSupplier.TypedDataSupplier(fieldType.typeName(), () -> randomAlphaOfLength(10), DataType.KEYWORD), + (d1, d2) -> equalTo("string"), + DataType.BOOLEAN, + (o1, o2) -> true + ) + ); + } + } + } + + public final void testLiteralExpressions() { + Expression expression = buildLiteralExpression(testCase); + assertFalse("expected resolved", expression.typeResolved().unresolved()); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java index 1f4e8e40a8259..a83cb24a44a45 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java @@ -26,7 +26,7 @@ public class MatchErrorTests extends ErrorsForCasesWithoutExamplesTestCase { @Override protected List cases() { - return paramsToSuppliers(MatchTests.parameters()); + return paramsToSuppliers(AbstractMatchFullTextFunctionTests.parameters()); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java index 951aff80541bd..78ea3f5451880 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java @@ -10,9 +10,12 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import 
com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import java.util.List; import java.util.function.Supplier; /** @@ -27,6 +30,11 @@ public MatchOperatorTests(@Name("TestCase") Supplier @ParametersFactory public static Iterable parameters() { - return MatchTests.parameters(); + return AbstractMatchFullTextFunctionTests.parameters(); + } + + @Override + protected Expression build(Source source, List args) { + return new MatchOperator(source, args.get(0), args.get(1)); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index cb0c9b263b547..4280ab487f213 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -12,22 +12,14 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import java.math.BigInteger; -import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.stringCases; -import static org.hamcrest.Matchers.equalTo; - 
@FunctionName("match") -public class MatchTests extends AbstractFunctionTestCase { +public class MatchTests extends AbstractMatchFullTextFunctionTests { public MatchTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -35,342 +27,7 @@ public MatchTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - List suppliers = new ArrayList<>(); - - addUnsignedLongCases(suppliers); - addNumericCases(suppliers); - addNonNumericCases(suppliers); - addQueryAsStringTestCases(suppliers); - addStringTestCases(suppliers); - - return parameterSuppliersFromTypedData(suppliers); - } - - private static void addNonNumericCases(List suppliers) { - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.booleanCases(), - TestCaseSupplier.booleanCases(), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.ipCases(), - TestCaseSupplier.ipCases(), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.versionCases(""), - TestCaseSupplier.versionCases(""), - List.of(), - false - ) - ); - // Datetime - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), - List.of(), - false - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.dateNanosCases(), - TestCaseSupplier.dateNanosCases(), - List.of(), - false - ) - ); - } - - private static void addNumericCases(List suppliers) { - suppliers.addAll( - 
TestCaseSupplier.forBinaryComparisonWithWidening( - new TestCaseSupplier.NumericTypeTestConfigs<>( - new TestCaseSupplier.NumericTypeTestConfig<>( - (Integer.MIN_VALUE >> 1) - 1, - (Integer.MAX_VALUE >> 1) - 1, - (l, r) -> true, - "EqualsIntsEvaluator" - ), - new TestCaseSupplier.NumericTypeTestConfig<>( - (Long.MIN_VALUE >> 1) - 1, - (Long.MAX_VALUE >> 1) - 1, - (l, r) -> true, - "EqualsLongsEvaluator" - ), - new TestCaseSupplier.NumericTypeTestConfig<>( - Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY, - // NB: this has different behavior than Double::equals - (l, r) -> true, - "EqualsDoublesEvaluator" - ) - ), - "field", - "query", - (lhs, rhs) -> List.of(), - false - ) - ); - } - - private static void addUnsignedLongCases(List suppliers) { - // TODO: These should be integrated into the type cross product above, but are currently broken - // see https://github.com/elastic/elasticsearch/issues/102935 - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), - TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), - TestCaseSupplier.longCases(Long.MIN_VALUE, Long.MAX_VALUE, true), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - 
DataType.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), - TestCaseSupplier.doubleCases(Double.MIN_VALUE, Double.MAX_VALUE, true), - List.of(), - false - ) - ); - } - - private static void addQueryAsStringTestCases(List suppliers) { - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.longCases(Integer.MIN_VALUE, Integer.MAX_VALUE, true), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.doubleCases(Double.MIN_VALUE, Double.MAX_VALUE, true), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - - // Unsigned Long cases - // TODO: These should be integrated into the type cross product above, but are currently broken - // see https://github.com/elastic/elasticsearch/issues/102935 - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.ulongCases(BigInteger.ZERO, NumericUtils.UNSIGNED_LONG_MAX, true), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - 
Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.booleanCases(), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.ipCases(), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.versionCases(""), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - // Datetime - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - - suppliers.addAll( - TestCaseSupplier.forBinaryNotCasting( - null, - "field", - "query", - Object::equals, - DataType.BOOLEAN, - TestCaseSupplier.dateNanosCases(), - TestCaseSupplier.stringCases(DataType.KEYWORD), - List.of(), - false - ) - ); - } - - private static void addStringTestCases(List suppliers) { - for (DataType fieldType : DataType.stringTypes()) { - if (DataType.UNDER_CONSTRUCTION.containsKey(fieldType)) { - continue; - } - for (TestCaseSupplier.TypedDataSupplier queryDataSupplier : stringCases(fieldType)) { - suppliers.add( - TestCaseSupplier.testCaseSupplier( - queryDataSupplier, - new TestCaseSupplier.TypedDataSupplier(fieldType.typeName(), () -> randomAlphaOfLength(10), DataType.KEYWORD), - (d1, d2) -> equalTo("string"), - DataType.BOOLEAN, - (o1, o2) -> true - ) - ); - } - } - } - - public final void testLiteralExpressions() { - Expression expression = buildLiteralExpression(testCase); - assertFalse("expected resolved", expression.typeResolved().unresolved()); + return AbstractMatchFullTextFunctionTests.parameters(); } @Override diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java index a3a808de277d7..7f70c6e8cd372 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffErrorTests.java @@ -36,7 +36,7 @@ protected Matcher expectedTypeErrorMatcher(List> validPerP if (i == 0) { return "string"; } - return "datetime"; + return "datetime or date_nanos"; })); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java index e194443a8bc2c..7380ac08f85a2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java @@ -18,7 +18,10 @@ public class DateDiffFunctionTests extends ESTestCase { public void testDateDiffFunctionErrorUnitNotValid() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> DateDiff.processMillis(new BytesRef("sseconds"), 0, 0) + ); assertThat( e.getMessage(), containsString( @@ -27,7 +30,7 @@ public void testDateDiffFunctionErrorUnitNotValid() { ) ); - e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); + e = expectThrows(IllegalArgumentException.class, () -> DateDiff.processMillis(new 
BytesRef("not-valid-unit"), 0, 0)); assertThat( e.getMessage(), containsString( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index e2e2f0572c7aa..e23283d899576 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -101,7 +102,7 @@ private static List makeSuppliers(Instant startTimestamp, Inst // Units as Keyword case return List.of( new TestCaseSupplier( - "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( @@ -109,15 +110,60 @@ private static List makeSuppliers(Instant startTimestamp, Inst new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(expected) ) ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + 
startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.KEYWORD, DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffNanosEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.KEYWORD, DataType.DATE_NANOS, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") + ), + "DateDiffNanosMillisEvaluator[unit=Attribute[channel=0], startTimestampNanos=Attribute[channel=1], " + + "endTimestampMillis=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffMillisNanosEvaluator[unit=Attribute[channel=0], 
startTimestampMillis=Attribute[channel=1], " + + "endTimestampNanos=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), // Units as text case new TestCaseSupplier( - "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, List.of(DataType.TEXT, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( @@ -125,11 +171,56 @@ private static List makeSuppliers(Instant startTimestamp, Inst new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(expected) ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.TEXT, DataType.DATE_NANOS, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffNanosEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.TEXT, DataType.DATE_NANOS, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new 
BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(DateUtils.toLong(startTimestamp), DataType.DATE_NANOS, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") + ), + "DateDiffNanosMillisEvaluator[unit=Attribute[channel=0], startTimestampNanos=Attribute[channel=1], " + + "endTimestampMillis=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) + ), + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, + List.of(DataType.TEXT, DataType.DATETIME, DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(DateUtils.toLong(endTimestamp), DataType.DATE_NANOS, "endTimestamp") + ), + "DateDiffMillisNanosEvaluator[unit=Attribute[channel=0], startTimestampMillis=Attribute[channel=1], " + + "endTimestampNanos=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(expected) + ) ) ); } @@ -146,7 +237,7 @@ private static List makeSuppliers(Instant startTimestamp, Inst new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(null) @@ -163,7 +254,7 @@ private static List makeSuppliers(Instant startTimestamp, Inst new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), - 
"DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "DateDiffMillisEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(null) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index 688341ebaa2b7..1235a175294af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -12,8 +12,13 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; @@ -29,6 +34,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunctionTestCase; import org.elasticsearch.xpack.esql.planner.Layout; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.time.ZonedDateTime; @@ -40,6 +46,9 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; public class CoalesceTests extends AbstractScalarFunctionTestCase { public CoalesceTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -49,7 +58,7 @@ public CoalesceTests(@Name("TestCase") Supplier testC @ParametersFactory public static Iterable parameters() { List noNullsSuppliers = new ArrayList<>(); - VaragsTestCaseBuilder builder = new VaragsTestCaseBuilder(type -> "Coalesce"); + VaragsTestCaseBuilder builder = new VaragsTestCaseBuilder(type -> "Coalesce" + type + "Eager"); builder.expectString(strings -> strings.filter(v -> v != null).findFirst()); builder.expectLong(longs -> longs.filter(v -> v != null).findFirst()); builder.expectInt(ints -> ints.filter(v -> v != null).findFirst()); @@ -64,7 +73,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(first, DataType.IP, "first"), new TestCaseSupplier.TypedData(second, DataType.IP, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceBytesRefEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.IP, equalTo(first == null ? second : first) ); @@ -79,7 +88,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(first, DataType.VERSION, "first"), new TestCaseSupplier.TypedData(second, DataType.VERSION, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceBytesRefEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.VERSION, equalTo(first == null ? 
second : first) ); @@ -92,7 +101,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(firstDate, DataType.DATETIME, "first"), new TestCaseSupplier.TypedData(secondDate, DataType.DATETIME, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceLongEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.DATETIME, equalTo(firstDate == null ? secondDate : firstDate) ); @@ -105,7 +114,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(firstDate, DataType.DATE_NANOS, "first"), new TestCaseSupplier.TypedData(secondDate, DataType.DATE_NANOS, "second") ), - "CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", + "CoalesceLongEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]", DataType.DATE_NANOS, equalTo(firstDate == null ? secondDate : firstDate) ); @@ -129,6 +138,20 @@ public static Iterable parameters() { suppliers.add(new TestCaseSupplier(nullCaseName(s, nullUpTo, true), types, () -> nullCase(s.get(), finalNullUpTo, true))); } } + suppliers.add( + new TestCaseSupplier( + List.of(DataType.NULL, DataType.NULL), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(null, DataType.NULL, "first"), + new TestCaseSupplier.TypedData(null, DataType.NULL, "second") + ), + "ConstantNull", + DataType.NULL, + nullValue() + ) + ) + ); return parameterSuppliersFromTypedData(suppliers); } @@ -167,7 +190,7 @@ protected static void addSpatialCombinations(List suppliers) { TestCaseSupplier.testCaseSupplier( leftDataSupplier, rightDataSupplier, - (l, r) -> equalTo("CoalesceEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]"), + (l, r) -> equalTo("CoalesceBytesRefEagerEvaluator[values=[Attribute[channel=0], Attribute[channel=1]]]"), dataType, (l, r) -> l ) @@ -235,6 +258,69 @@ public void testCoalesceNotNullable() { sub.add(between(0, sub.size()), randomLiteral(sub.get(sub.size() - 1).dataType())); 
Coalesce exp = build(Source.EMPTY, sub); // Known not to be nullable because it contains a non-null literal - assertThat(exp.nullable(), equalTo(Nullability.FALSE)); + if (testCase.expectedType() == DataType.NULL) { + assertThat(exp.nullable(), equalTo(Nullability.UNKNOWN)); + } else { + assertThat(exp.nullable(), equalTo(Nullability.FALSE)); + } + } + + /** + * Inserts random non-null garbage around the expected data and runs COALESCE. + *

+ * This is important for catching the case where your value is null, but the rest of the block + * isn't null. An off-by-one error in the evaluators can break this in a way that the standard + * tests weren't catching and this does. + *

+ */ + public void testEvaluateWithGarbage() { + DriverContext context = driverContext(); + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + int positions = between(2, 1024); + List data = testCase.getData(); + Page onePositionPage = row(testCase.getDataValues()); + Block[] blocks = new Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; + int realPosition = between(0, positions - 1); + try { + int blocksIndex = 0; + for (TestCaseSupplier.TypedData d : data) { + blocks[blocksIndex] = blockWithRandomGarbage( + context.blockFactory(), + d.type(), + onePositionPage.getBlock(blocksIndex), + positions, + realPosition + ); + blocksIndex++; + } + try ( + EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(context); + Block block = eval.eval(new Page(positions, blocks)) + ) { + assertThat(block.getPositionCount(), is(positions)); + assertThat(toJavaObjectUnsignedLongAware(block, realPosition), testCase.getMatcher()); + assertThat("evaluates to tracked block", block.blockFactory(), sameInstance(context.blockFactory())); + } + } finally { + Releasables.close(onePositionPage::releaseBlocks, Releasables.wrap(blocks)); + } + } + + private Block blockWithRandomGarbage( + BlockFactory blockFactory, + DataType type, + Block singlePositionBlock, + int totalPositions, + int insertLocation + ) { + try (Block.Builder builder = PlannerUtils.toElementType(type).newBlockBuilder(totalPositions, blockFactory)) { + for (int p = 0; p < totalPositions; p++) { + Block copyFrom = p == insertLocation + ? 
singlePositionBlock + : BlockUtils.constantBlock(TestBlockFactory.getNonBreakingInstance(), randomLiteral(type).value(), 1); + builder.copyFrom(copyFrom, 0, 1); + } + return builder.build(); + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 589477a8bebdc..26340be224082 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -20,7 +20,9 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.junit.AfterClass; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -150,4 +152,9 @@ static Expression buildRLike(Logger logger, Source source, List args ? 
new RLike(source, expression, new RLikePattern(patternString), true) : new RLike(source, expression, new RLikePattern(patternString)); } + + @AfterClass + public static void renderNotRLike() throws IOException { + WildcardLikeTests.renderNot(constructorWithFunctionInfo(RLike.class), "RLIKE", d -> d); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index e60c5f77ab42e..7f04f076ed15f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.unboundid.util.NotNull; import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -18,11 +19,19 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.junit.AfterClass; +import java.io.IOException; +import java.lang.annotation.Annotation; +import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.function.Function; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -87,4 +96,67 @@ 
static Expression buildWildcardLike(Source source, List args) { } return new WildcardLike(source, expression, new WildcardPattern(((BytesRef) pattern.fold(FoldContext.small())).utf8ToString())); } + + @AfterClass + public static void renderNotLike() throws IOException { + renderNot(constructorWithFunctionInfo(WildcardLike.class), "LIKE", d -> d); + } + + public static void renderNot(@NotNull Constructor ctor, String name, Function description) throws IOException { + FunctionInfo orig = ctor.getAnnotation(FunctionInfo.class); + assert orig != null; + FunctionInfo functionInfo = new FunctionInfo() { + @Override + public Class annotationType() { + return orig.annotationType(); + } + + @Override + public String operator() { + return "NOT " + name; + } + + @Override + public String[] returnType() { + return orig.returnType(); + } + + @Override + public boolean preview() { + return orig.preview(); + } + + @Override + public String description() { + return description.apply(orig.description().replace(name, "NOT " + name)); + } + + @Override + public String detailedDescription() { + return ""; + } + + @Override + public String note() { + return orig.note().replace(name, "NOT " + name); + } + + @Override + public String appendix() { + return orig.appendix().replace(name, "NOT " + name); + } + + @Override + public boolean isAggregation() { + return orig.isAggregation(); + } + + @Override + public Example[] examples() { + // throw away examples + return new Example[] {}; + } + }; + renderDocsForOperators("not_" + name.toLowerCase(Locale.ENGLISH), ctor, functionInfo); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java index 80f67ec8e5e3a..03a4b063d6294 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java 
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InTests.java @@ -19,7 +19,10 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLikeTests; +import org.junit.AfterClass; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -333,4 +336,14 @@ private static void bytesRefs(List suppliers, int items) { protected Expression build(Source source, List args) { return new In(source, args.get(args.size() - 1), args.subList(0, args.size() - 1)); } + + @AfterClass + public static void renderNotIn() throws IOException { + WildcardLikeTests.renderNot( + constructorWithFunctionInfo(In.class), + "IN", + d -> "The `NOT IN` operator allows testing whether a field or expression does *not* equal any element " + + "in a list of literals, fields or expressions." 
+ ); + } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java index 5fc4448c80940..69912a967fd22 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListenerTests.java @@ -85,8 +85,7 @@ public class ServerSentEventsRestActionListenerTests extends ESIntegTestCase { private static final Exception expectedException = new IllegalStateException("hello there"); private static final String expectedExceptionAsServerSentEvent = """ {\ - "error":{"root_cause":[{"type":"illegal_state_exception","reason":"hello there",\ - "caused_by":{"type":"illegal_state_exception","reason":"hello there"}}],\ + "error":{"root_cause":[{"type":"illegal_state_exception","reason":"hello there"}],\ "type":"illegal_state_exception","reason":"hello there"},"status":500\ }"""; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java index 7ad3e8eea0538..16b1e1ac741bb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.apache.lucene.search.Explanation; -import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,6 +24,10 @@ public class TextSimilarityRankDoc extends RankDoc { public final String inferenceId; public final String field; + public TextSimilarityRankDoc(int doc, float score, int shardIndex) { + this(doc, score, shardIndex, null, null); + } + public TextSimilarityRankDoc(int doc, float score, int shardIndex, String inferenceId, String field) { super(doc, score, shardIndex); this.inferenceId = inferenceId; @@ -33,12 +36,18 @@ public TextSimilarityRankDoc(int doc, float score, int shardIndex, String infere public TextSimilarityRankDoc(StreamInput in) throws IOException { super(in); - inferenceId = in.readString(); - field = in.readString(); + if (in.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN)) { + inferenceId = in.readOptionalString(); + field = in.readOptionalString(); + } else { + inferenceId = in.readString(); + field = in.readString(); + } } @Override public Explanation explain(Explanation[] sources, String[] queryNames) { + assert inferenceId != null && field != null; final String queryAlias = queryNames[0] == null ? "" : "[" + queryNames[0] + "]"; return Explanation.match( score, @@ -54,8 +63,13 @@ public Explanation explain(Explanation[] sources, String[] queryNames) { @Override public void doWriteTo(StreamOutput out) throws IOException { - out.writeString(inferenceId); - out.writeString(field); + if (out.getTransportVersion().onOrAfter(TransportVersions.RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN)) { + out.writeOptionalString(inferenceId); + out.writeOptionalString(field); + } else { + out.writeString(inferenceId == null ? "" : inferenceId); + out.writeString(field == null ? 
"" : field); + } } @Override @@ -92,12 +106,11 @@ public String getWriteableName() { @Override protected void doToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("inferenceId", inferenceId); - builder.field("field", field); - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_16_0; + if (inferenceId != null) { + builder.field("inferenceId", inferenceId); + } + if (field != null) { + builder.field("field", field); + } } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 42248d246d3da..10a1bc324fd2b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -136,13 +136,23 @@ protected TextSimilarityRankRetrieverBuilder clone( } @Override - protected RankDoc[] combineInnerRetrieverResults(List rankResults) { + protected RankDoc[] combineInnerRetrieverResults(List rankResults, boolean explain) { assert rankResults.size() == 1; ScoreDoc[] scoreDocs = rankResults.getFirst(); TextSimilarityRankDoc[] textSimilarityRankDocs = new TextSimilarityRankDoc[scoreDocs.length]; for (int i = 0; i < scoreDocs.length; i++) { ScoreDoc scoreDoc = scoreDocs[i]; - textSimilarityRankDocs[i] = new TextSimilarityRankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex, inferenceId, field); + if (explain) { + textSimilarityRankDocs[i] = new TextSimilarityRankDoc( + scoreDoc.doc, + scoreDoc.score, + scoreDoc.shardIndex, + inferenceId, + field + ); + } else { + textSimilarityRankDocs[i] = new TextSimilarityRankDoc(scoreDoc.doc, scoreDoc.score, 
scoreDoc.shardIndex); + } } return textSimilarityRankDocs; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java index 62cbcf902a9e0..6991e1325f3bc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/ServerSentEventsRestActionListener.java @@ -40,15 +40,10 @@ import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Iterator; -import java.util.Map; import java.util.Objects; import java.util.concurrent.Flow; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE; -import static org.elasticsearch.ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE; -import static org.elasticsearch.rest.RestController.ERROR_TRACE_DEFAULT; - /** * A version of {@link org.elasticsearch.rest.action.RestChunkedToXContentListener} that reads from a {@link Flow.Publisher} and encodes * the response in Server-Sent Events. 
@@ -154,48 +149,23 @@ public void onFailure(Exception e) { } } - // taken indirectly from "new Response(channel, e)" - // except we need to emit the error as SSE private ChunkedToXContent errorChunk(Throwable t) { var status = ExceptionsHelper.status(t); - return params -> Iterators.concat(ChunkedToXContentHelper.startObject(), ChunkedToXContentHelper.chunk((b, p) -> { - // Render the exception with a simple message - if (channel.detailedErrorsEnabled() == false) { - String message = "No ElasticsearchException found"; - var inner = t; - for (int counter = 0; counter < 10 && inner != null; counter++) { - if (inner instanceof ElasticsearchException) { - message = inner.getClass().getSimpleName() + "[" + inner.getMessage() + "]"; - break; - } - inner = inner.getCause(); - } - return b.field("error", message); - } - - var errorParams = p; - if (errorParams.paramAsBoolean("error_trace", ERROR_TRACE_DEFAULT) && status != RestStatus.UNAUTHORIZED) { - errorParams = new ToXContent.DelegatingMapParams( - Map.of(REST_EXCEPTION_SKIP_STACK_TRACE, "false", REST_EXCEPTION_SKIP_CAUSE, "true"), - params - ); - } - // Render the exception with all details - final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(t); - b.startObject("error"); - { - b.startArray("root_cause"); - for (ElasticsearchException rootCause : rootCauses) { - b.startObject(); - rootCause.toXContent(b, errorParams); - b.endObject(); - } - b.endArray(); - } - ElasticsearchException.generateThrowableXContent(b, errorParams, t); - return b.endObject(); - }), ChunkedToXContentHelper.field("status", status.getStatus()), ChunkedToXContentHelper.endObject()); + Exception e; + if (t instanceof Exception) { + e = (Exception) t; + } else { + // if not exception, then error, and we should not let it escape. rethrow on another thread, and inform the user we're stopping. 
+ ExceptionsHelper.maybeDieOnAnotherThread(t); + e = new RuntimeException("Fatal error while streaming response", t); + } + return params -> Iterators.concat( + ChunkedToXContentHelper.startObject(), + Iterators.single((b, p) -> ElasticsearchException.generateFailureXContent(b, p, e, channel.detailedErrorsEnabled())), + Iterators.single((b, p) -> b.field("status", status.getStatus())), + ChunkedToXContentHelper.endObject() + ); } private void requestNextChunk(ActionListener listener) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index 112be95dac1fd..4c1cac4d7a77b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -115,7 +115,7 @@ private URI createUri() throws ElasticsearchStatusException { try { // TODO, consider transforming the base URL into a URI for better error handling. 
return new URI( - elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/sparse-text-embeddings/" + modelIdUriPath + elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/embed/text/sparse/" + modelIdUriPath ); } catch (URISyntaxException e) { throw new ElasticsearchStatusException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java index 84039cd7cc33c..b26f80efb1930 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionModel.java @@ -106,7 +106,7 @@ public URI uri() { private URI createUri() throws ElasticsearchStatusException { try { // TODO, consider transforming the base URL into a URI for better error handling. 
- return new URI(elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/chat/completions"); + return new URI(elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/chat"); } catch (URISyntaxException e) { throw new ElasticsearchStatusException( "Failed to create URI for service [" diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 409d62426949c..6f033fdfd2f22 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -187,7 +187,6 @@ public void testGetModelNoSecrets() { var listener = new PlainActionFuture(); registry.getModel("1", listener); - registry.getModel("1", listener); var modelConfig = listener.actionGet(TIMEOUT); assertEquals("1", modelConfig.inferenceEntityId()); assertEquals("foo", modelConfig.service()); diff --git a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java index 4a9d13bc642d7..78d59c0af0d06 100644 --- a/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java +++ b/x-pack/plugin/logsdb/qa/with-basic/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbWithBasicRestIT.java @@ -61,14 +61,7 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { var settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "field1").build(); createIndex("test-index", settings, mapping); } else { - String mapping = """ - { - "_source": { - "mode": "synthetic" - } - } - """; - 
createIndex("test-index", Settings.EMPTY, mapping); + createIndex("test-index", Settings.builder().put("index.mapping.source.mode", "synthetic").build()); } var response = getAsMap("/_license/feature_usage"); @SuppressWarnings("unchecked") @@ -85,21 +78,6 @@ public void testLogsdbIndexGetsStoredSource() throws IOException { assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); } - public void testLogsdbOverrideSyntheticSourceModeInMapping() throws IOException { - final String index = "test-index"; - String mapping = """ - { - "_source": { - "mode": "synthetic" - } - } - """; - createIndex(index, Settings.builder().put("index.mode", "logsdb").build(), mapping); - var settings = (Map) ((Map) getIndexSettings(index).get(index)).get("settings"); - assertEquals("logsdb", settings.get("index.mode")); - assertEquals(SourceFieldMapper.Mode.STORED.toString(), settings.get("index.mapping.source.mode")); - } - public void testLogsdbOverrideSyntheticSourceSetting() throws IOException { final String index = "test-index"; createIndex( diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index b5a3ff482c3cf..b4abdfd09ffc9 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.junit.Before; @@ -24,6 +23,7 @@ import 
static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; @SuppressWarnings("unchecked") public class LogsIndexModeCustomSettingsIT extends LogsIndexModeRestTestIT { @@ -102,12 +102,12 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { "template": { "settings": { "index": { - "mode": "logsdb" - } - }, - "mappings": { - "_source": { - "mode": "stored" + "mode": "logsdb", + "mapping": { + "source": { + "mode": "stored" + } + } } } } @@ -115,21 +115,10 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); Request request = new Request("PUT", "_data_stream/logs-custom-dev"); - if (SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); - } assertOK(client.performRequest(request)); - var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); - String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); - assertThat(sourceMode, equalTo("stored")); - - request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client.performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [logs@custom]") - ); + var indexName = getDataStreamBackingIndex(client, "logs-custom-dev", 0); + var settings = (Map) ((Map) ((Map) getIndexSettings(indexName)).get(indexName)).get("settings"); + assertThat(settings, hasEntry("index.mapping.source.mode", "stored")); } public void testConfigureDisabledSourceBeforeIndexCreation() { @@ -163,12 
+152,12 @@ public void testConfigureDisabledSourceModeBeforeIndexCreation() { "template": { "settings": { "index": { - "mode": "logsdb" - } - }, - "mappings": { - "_source": { - "mode": "disabled" + "mode": "logsdb", + "mapping": { + "source": { + "mode": "disabled" + } + } } } } @@ -186,9 +175,13 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { var storedSourceMapping = """ { "template": { - "mappings": { - "_source": { - "mode": "stored" + "settings": { + "index": { + "mapping": { + "source": { + "mode": "stored" + } + } } } } @@ -196,22 +189,11 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { assertOK(putComponentTemplate(client, "logs@custom", storedSourceMapping)); Request request = new Request("PUT", "_data_stream/logs-custom-dev"); - if (SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions(expectVersionSpecificWarnings(v -> v.current(SourceFieldMapper.DEPRECATION_WARNING))); - } assertOK(client.performRequest(request)); - var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); - String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); - assertThat(sourceMode, equalTo("stored")); - - request = new Request("GET", "/_migration/deprecations"); - var nodeSettings = (Map) ((List) entityAsMap(client.performRequest(request)).get("node_settings")).getFirst(); - assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); - assertThat( - (String) nodeSettings.get("details"), - containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [logs@custom]") - ); + var indexName = getDataStreamBackingIndex(client, "logs-custom-dev", 0); + var settings = (Map) ((Map) ((Map) getIndexSettings(indexName)).get(indexName)).get("settings"); + assertThat(settings, hasEntry("index.mapping.source.mode", "stored")); } public void 
testConfigureDisabledSourceWhenIndexIsCreated() throws IOException { @@ -235,9 +217,13 @@ public void testConfigureDisabledSourceModeWhenIndexIsCreated() throws IOExcepti var disabledModeMapping = """ { "template": { - "mappings": { - "_source": { - "mode": "disabled" + "settings": { + "index": { + "mapping": { + "source": { + "mode": "disabled" + } + } } } } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java index 0990592cef5e3..cc7f5bdb33871 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeRestTestIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; @@ -36,11 +35,6 @@ protected static Response putComponentTemplate(final RestClient client, final St throws IOException { final Request request = new Request("PUT", "/_component_template/" + componentTemplate); request.setJsonEntity(contends); - if (isSyntheticSourceConfiguredInTemplate(contends) && SourceFieldMapper.onOrAfterDeprecateModeVersion(minimumIndexVersion())) { - request.setOptions( - expectVersionSpecificWarnings((VersionSensitiveWarningsHandler v) -> v.current(SourceFieldMapper.DEPRECATION_WARNING)) - ); - } return client.performRequest(request); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java index 177858b84ad43..675b1baad7c2c 100644 --- 
a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbRestIT.java @@ -59,14 +59,7 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { var settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "field1").build(); createIndex("test-index", settings, mapping); } else { - String mapping = """ - { - "_source": { - "mode": "synthetic" - } - } - """; - createIndex("test-index", Settings.EMPTY, mapping); + createIndex("test-index", Settings.builder().put("index.mapping.source.mode", "synthetic").build()); } var response = getAsMap("/_license/feature_usage"); @SuppressWarnings("unchecked") @@ -77,8 +70,15 @@ public void testFeatureUsageWithLogsdbIndex() throws IOException { assertThat(feature.get("name"), equalTo("synthetic-source")); assertThat(feature.get("license_level"), equalTo("enterprise")); - var settings = (Map) ((Map) getIndexSettings("test-index").get("test-index")).get("settings"); - assertNull(settings.get("index.mapping.source.mode")); // Default, no downgrading. 
+ var indexResponse = (Map) getIndexSettings("test-index", true).get("test-index"); + logger.info("indexResponse: {}", indexResponse); + var sourceMode = ((Map) indexResponse.get("settings")).get("index.mapping.source.mode"); + if (sourceMode != null) { + assertThat(sourceMode, equalTo("synthetic")); + } else { + var defaultSourceMode = ((Map) indexResponse.get("defaults")).get("index.mapping.source.mode"); + assertThat(defaultSourceMode, equalTo("SYNTHETIC")); + } } } diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index 77319a881f1e5..5220b5eba4567 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -436,7 +436,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { """; boolean result = provider.getMappingHints(indexName, null, settings, List.of(new CompressedXContent(mapping))) .hasSyntheticSourceUsage(); - assertTrue(result); + assertFalse("_source.mode is a noop", result); assertThat(newMapperServiceCounter.get(), equalTo(1)); assertWarnings(SourceFieldMapper.DEPRECATION_WARNING); } diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 792df4dbf639e..08d724fa2cb71 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -7,15 +7,14 @@ create an index with disabled source mode and standard index mode without settin settings: index: mode: standard - mappings: - _source: - mode: 
disabled + mapping: + source: + mode: disabled - do: - indices.get_mapping: + indices.get_settings: index: test_disabled_standard - - - match: { test_disabled_standard.mappings._source.mode: disabled } + - match: { test_disabled_standard.settings.index.mapping.source.mode: disabled } --- create an index with stored source mode and standard index mode without setting: @@ -26,15 +25,15 @@ create an index with stored source mode and standard index mode without setting: settings: index: mode: standard - mappings: - _source: - mode: stored + mapping: + source: + mode: stored - do: - indices.get_mapping: + indices.get_settings: index: test_stored_standard - - match: { test_stored_standard.mappings._source.mode: stored } + - match: { test_stored_standard.settings.index.mapping.source.mode: stored } --- create an index with synthetic source mode and standard index mode without setting: @@ -45,15 +44,15 @@ create an index with synthetic source mode and standard index mode without setti settings: index: mode: standard - mappings: - _source: - mode: synthetic + mapping: + source: + mode: "synthetic" - do: - indices.get_mapping: + indices.get_settings: index: test_synthetic_standard - - match: { test_synthetic_standard.mappings._source.mode: synthetic } + - match: { test_synthetic_standard.settings.index.mapping.source.mode: synthetic } --- create an index with disabled source mode and logsdb index mode without setting: @@ -65,9 +64,9 @@ create an index with disabled source mode and logsdb index mode without setting: settings: index: mode: logsdb - mappings: - _source: - mode: disabled + mapping: + source: + mode: disabled - match: { error.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } @@ -81,20 +80,15 @@ create an index with stored source mode and logsdb index mode without setting: settings: index: mode: logsdb - mappings: - _source: - mode: stored + mapping: + source: + 
mode: stored - do: indices.get_settings: index: "test_stored_logsdb" - match: { test_stored_logsdb.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_stored_logsdb - - - match: { test_stored_logsdb.mappings._source.mode: stored } + - match: { test_stored_logsdb.settings.index.mapping.source.mode: stored } --- create an index with synthetic source mode and logsdb index mode without setting: @@ -105,15 +99,15 @@ create an index with synthetic source mode and logsdb index mode without setting settings: index: mode: logsdb - mappings: - _source: - mode: synthetic + mapping: + source: + mode: synthetic - do: - indices.get_mapping: + indices.get_settings: index: test_synthetic_logsdb - - match: { test_synthetic_logsdb.mappings._source.mode: synthetic } + - match: { test_synthetic_logsdb.settings.index.mapping.source.mode: synthetic } --- create an index with disabled source mode and time series index mode without setting: @@ -125,13 +119,14 @@ create an index with disabled source mode and time series index mode without set settings: index: mode: time_series + mapping: + source: + mode: disabled routing_path: [ keyword ] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: - _source: - mode: disabled properties: keyword: type: keyword @@ -149,13 +144,14 @@ create an index with stored source mode and time series index mode without setti settings: index: mode: time_series + mapping: + source: + mode: stored routing_path: [ keyword ] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: - _source: - mode: stored properties: keyword: type: keyword @@ -167,11 +163,10 @@ create an index with stored source mode and time series index mode without setti - match: { test_stored_time_series.settings.index.mode: time_series } - do: - indices.get_mapping: + indices.get_settings: index: test_stored_time_series - - match: { test_stored_time_series.mappings._source.mode: stored } - + - match: { 
test_stored_time_series.settings.index.mapping.source.mode: stored } --- create an index with synthetic source mode and time series index mode without setting: @@ -182,13 +177,14 @@ create an index with synthetic source mode and time series index mode without se settings: index: mode: time_series + mapping: + source: + mode: synthetic routing_path: [ keyword ] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z mappings: - _source: - mode: synthetic properties: keyword: type: keyword @@ -198,174 +194,56 @@ create an index with synthetic source mode and time series index mode without se indices.get_settings: index: "test_synthetic_time_series" - match: { test_synthetic_time_series.settings.index.mode: time_series } - - - do: - indices.get_mapping: - index: test_synthetic_time_series - - - match: { test_synthetic_time_series.mappings._source.mode: synthetic } + - match: { test_synthetic_time_series.settings.index.mapping.source.mode: synthetic } --- create an index with stored source mode: - do: indices.create: index: test_stored_default - body: - mappings: - _source: - mode: stored - - - do: - indices.get_mapping: - index: test_stored_default - - - match: { test_stored_default.mappings._source.mode: stored } - ---- -override stored to synthetic source mode: - - do: - indices.create: - index: test_stored_override body: settings: index: - mapping.source.mode: synthetic - mappings: - _source: - mode: stored + mapping: + source: + mode: stored - do: - indices.get_mapping: - index: test_stored_override - - - match: { test_stored_override.mappings._source.mode: synthetic } - ---- -override stored to disabled source mode: - - do: - indices.create: - index: test_stored_disabled - body: - settings: - index: - mapping.source.mode: disabled - mappings: - _source: - mode: stored - - - do: - indices.get_mapping: - index: test_stored_disabled + indices.get_settings: + index: test_stored_default - - match: { test_stored_disabled.mappings._source.mode: 
disabled } + - match: { test_stored_default.settings.index.mapping.source.mode: stored } --- create an index with disabled source mode: - do: indices.create: index: test_disabled_default - body: - mappings: - _source: - mode: disabled - - - do: - indices.get_mapping: - index: test_disabled_default - - - match: { test_disabled_default.mappings._source.mode: disabled } - ---- -override disabled to synthetic source mode: - - do: - indices.create: - index: test_disabled_synthetic body: settings: index: - mapping.source.mode: synthetic - mappings: - _source: - mode: disabled - - - do: - indices.get_mapping: - index: test_disabled_synthetic - - - match: { test_disabled_synthetic.mappings._source.mode: synthetic } - ---- -override disabled to stored source mode: - - do: - indices.create: - index: test_disabled_stored - body: - settings: - index: - mapping.source.mode: stored - mappings: - _source: - mode: disabled + mapping.source.mode: disabled - do: - indices.get_mapping: - index: test_disabled_stored + indices.get_settings: + index: test_disabled_default - - match: { test_disabled_stored.mappings._source.mode: stored } + - match: { test_disabled_default.settings.index.mapping.source.mode: disabled } --- create an index with synthetic source mode: - do: indices.create: index: test_synthetic_default - body: - mappings: - _source: - mode: synthetic - - - do: - indices.get_mapping: - index: test_synthetic_default - - - match: { test_synthetic_default.mappings._source.mode: synthetic } - ---- -override synthetic to stored source mode: - - do: - indices.create: - index: test_synthetic_stored - body: - settings: - index: - mapping.source.mode: stored - mappings: - _source: - mode: synthetic - - - do: - indices.get_mapping: - index: test_synthetic_stored - - - match: { test_synthetic_stored.mappings._source.mode: stored } - ---- -override synthetic to disabled source mode: - - do: - indices.create: - index: test_synthetic_disabled body: settings: index: - mapping.source.mode: 
disabled - mappings: - _source: - mode: synthetic + mapping.source.mode: synthetic - do: - indices.get_mapping: - index: test_synthetic_disabled - - - match: { test_synthetic_disabled.mappings._source.mode: disabled } + indices.get_settings: + index: test_synthetic_default + - match: { test_synthetic_default.settings.index.mapping.source.mode: synthetic } --- create an index with unspecified source mode: @@ -374,10 +252,10 @@ create an index with unspecified source mode: index: test_unset_default - do: - indices.get_mapping: + indices.get_settings: index: test_unset_default - - match: { test_unset_default.mappings._source.mode: null } + - match: { test_unset_default.settings.index.mapping.source.mode: null } --- override unspecified to stored source mode: @@ -436,15 +314,14 @@ create an index with standard index mode: settings: index: mode: standard - mappings: - _source: - mode: stored - + mapping: + source: + mode: stored - do: - indices.get_mapping: + indices.get_settings: index: test_standard_index_mode - - match: { test_standard_index_mode.mappings._source.mode: stored } + - match: { test_standard_index_mode.settings.index.mapping.source.mode: stored } --- create an index with time_series index mode and synthetic source: @@ -590,237 +467,25 @@ modify final setting after index creation: mapping.source.mode: synthetic --- -modify source mapping from stored to disabled after index creation: - - do: - indices.create: - index: test_modify_source_mode_stored_disabled - body: - settings: - index: - mapping.source.mode: stored +use no-op _source.mode attr: + - requires: + test_runner_features: [ "warnings" ] - do: - indices.put_mapping: - index: test_modify_source_mode_stored_disabled - body: - _source: - mode: disabled - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_stored_disabled - - match: { test_modify_source_mode_stored_disabled.mappings._source.mode: stored } - ---- -modify source mapping from stored to synthetic 
after index creation: - - do: + warnings: + - "Configuring source mode in mappings is deprecated and will be removed in future versions. Use [index.mapping.source.mode] index setting instead." indices.create: - index: test_modify_source_mode_stored_synthetic - body: - settings: - index: - mapping.source.mode: stored - - - do: - indices.put_mapping: - index: test_modify_source_mode_stored_synthetic - body: - _source: - mode: synthetic - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_stored_synthetic - - match: { test_modify_source_mode_stored_synthetic.mappings._source.mode: stored } - ---- -modify source mapping from disabled to stored after index creation: - - do: - indices.create: - index: test_modify_source_mode_disabled_stored - body: - settings: - index: - mapping.source.mode: disabled - - - do: - indices.put_mapping: - index: test_modify_source_mode_disabled_stored - body: - _source: - mode: stored - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_disabled_stored - - match: { test_modify_source_mode_disabled_stored.mappings._source.mode: disabled } - ---- -modify source mapping from disabled to synthetic after index creation: - - do: - indices.create: - index: test_modify_source_mode_disabled_synthetic - body: - settings: - index: - mapping.source.mode: disabled - - - do: - indices.put_mapping: - index: test_modify_source_mode_disabled_synthetic - body: - _source: - mode: synthetic - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_disabled_synthetic - - match: { test_modify_source_mode_disabled_synthetic.mappings._source.mode: disabled } - ---- -modify source mapping from synthetic to stored after index creation: - - do: - indices.create: - index: test_modify_source_mode_synthetic_stored - body: - settings: - index: - mapping.source.mode: synthetic - - - do: - indices.put_mapping: - index: test_modify_source_mode_synthetic_stored - 
body: - _source: - mode: stored - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_synthetic_stored - - match: { test_modify_source_mode_synthetic_stored.mappings._source.mode: synthetic } - ---- -modify source mapping from synthetic to disabled after index creation: - - do: - indices.create: - index: test_modify_source_mode_synthetic_disabled + index: test body: settings: index: mapping.source.mode: synthetic - - - do: - indices.put_mapping: - index: test_modify_source_mode_synthetic_disabled - body: - _source: - mode: disabled - - is_true: acknowledged - - - do: - indices.get_mapping: - index: test_modify_source_mode_synthetic_disabled - - match: { test_modify_source_mode_synthetic_disabled.mappings._source.mode: synthetic } - ---- -modify logsdb index source mode to disabled after index creation: - - do: - indices.create: - index: test_modify_logsdb_disabled_after_creation - body: - settings: - index: - mode: logsdb - - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_logsdb_disabled_after_creation - body: - _source: - mode: disabled - - match: { error.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [logsdb] index mode" } - ---- -modify logsdb index source mode to stored after index creation: - - do: - indices.create: - index: test_modify_logsdb_stored_after_creation - body: - settings: - index: - mode: logsdb - - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_logsdb_stored_after_creation - body: - _source: - mode: stored - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "Mapper for [_source] conflicts with existing mapper:\n\tCannot update parameter [mode] from [synthetic] to [stored]" } - ---- -modify time_series index source mode to disabled after index creation: - - do: - indices.create: - index: test_modify_time_series_disabled_after_creation - body: - 
settings: - index: - mode: time_series - routing_path: [ keyword ] - time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z mappings: - properties: - keyword: - type: keyword - time_series_dimension: true - - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_time_series_disabled_after_creation - body: - _source: - mode: disabled - - match: { error.type: "mapper_parsing_exception" } - - match: { error.reason: "Failed to parse mapping: _source can not be disabled in index using [time_series] index mode" } + _source: + mode: synthetic ---- -modify time_series index source mode to stored after index creation: - do: - indices.create: - index: test_modify_time_series_stored_after_creation - body: - settings: - index: - mode: time_series - routing_path: [ keyword ] - time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z - mappings: - properties: - keyword: - type: keyword - time_series_dimension: true + indices.get_settings: + index: test - - do: - catch: bad_request - indices.put_mapping: - index: test_modify_time_series_stored_after_creation - body: - _source: - mode: stored - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "Mapper for [_source] conflicts with existing mapper:\n\tCannot update parameter [mode] from [synthetic] to [stored]" } + - match: { test.settings.index.mapping.source.mode: synthetic } diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index 1a765ca06efbc..d38fa456582b3 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -76,8 +76,7 
@@ * 2 for each key (one per document), a counted_terms aggregation on a counted_keyword field will consider * the actual count and report a count of 3 for each key.

* - *

Synthetic source is supported, but uses the fallback "ignore source" infrastructure unless the source_keep_mode is - * explicitly set to none in the field mapping parameters.

+ *

Synthetic source is fully supported.

*/ public class CountedKeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "counted_keyword"; @@ -274,9 +273,11 @@ private static CountedKeywordFieldMapper toType(FieldMapper in) { public static class Builder extends FieldMapper.Builder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).mappedFieldType.isIndexed(), true); private final Parameter> meta = Parameter.metaParam(); + private final SourceKeepMode indexSourceKeepMode; - protected Builder(String name) { + protected Builder(String name, SourceKeepMode indexSourceKeepMode) { super(name); + this.indexSourceKeepMode = indexSourceKeepMode; } @Override @@ -306,7 +307,8 @@ public FieldMapper build(MapperBuilderContext context) { countFieldMapper.fieldType() ), builderParams(this, context), - countFieldMapper + countFieldMapper, + indexSourceKeepMode ); } } @@ -386,21 +388,26 @@ public String fieldName() { } } - public static TypeParser PARSER = new TypeParser((n, c) -> new CountedKeywordFieldMapper.Builder(n)); + public static TypeParser PARSER = new TypeParser( + (n, c) -> new CountedKeywordFieldMapper.Builder(n, c.getIndexSettings().sourceKeepMode()) + ); private final FieldType fieldType; private final BinaryFieldMapper countFieldMapper; + private final SourceKeepMode indexSourceKeepMode; protected CountedKeywordFieldMapper( String simpleName, FieldType fieldType, MappedFieldType mappedFieldType, BuilderParams builderParams, - BinaryFieldMapper countFieldMapper + BinaryFieldMapper countFieldMapper, + SourceKeepMode indexSourceKeepMode ) { super(simpleName, mappedFieldType, builderParams); this.fieldType = fieldType; this.countFieldMapper = countFieldMapper; + this.indexSourceKeepMode = indexSourceKeepMode; } @Override @@ -482,7 +489,7 @@ public Iterator iterator() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName()).init(this); + return new Builder(leafName(), indexSourceKeepMode).init(this); } @Override @@ -492,8 +499,8 
@@ protected String contentType() { @Override protected SyntheticSourceSupport syntheticSourceSupport() { - var keepMode = sourceKeepMode(); - if (keepMode.isPresent() == false || keepMode.get() != SourceKeepMode.NONE) { + var keepMode = sourceKeepMode().orElse(indexSourceKeepMode); + if (keepMode != SourceKeepMode.NONE) { return super.syntheticSourceSupport(); } diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java index c99edcf7352fa..176311565ec88 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java @@ -10,6 +10,8 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.DocumentMapper; @@ -20,12 +22,15 @@ import org.elasticsearch.search.lookup.SourceFilter; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.junit.AssumptionViolatedException; import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.stream.Stream; import static org.hamcrest.Matchers.equalTo; @@ -75,7 +80,6 @@ public void testSyntheticSourceSingleNullValue() throws IOException { DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { 
b.startObject("field"); minimalMapping(b); - b.field("synthetic_source_keep", "none"); b.endObject(); })).documentMapper(); @@ -94,7 +98,6 @@ public void testSyntheticSourceManyNullValue() throws IOException { DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { b.startObject("field"); minimalMapping(b); - b.field("synthetic_source_keep", "none"); b.endObject(); })).documentMapper(); @@ -114,19 +117,32 @@ public void testSyntheticSourceManyNullValue() throws IOException { assertThat(syntheticSource(mapper, new SourceFilter(null, new String[] { "field" }), buildInput), equalTo("{}")); } - @Override - public void testSyntheticSourceKeepAll() throws IOException { - // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" - } + public void testSyntheticSourceIndexLevelKeepArrays() throws IOException { + SyntheticSourceExample example = syntheticSourceSupportForKeepTests(shouldUseIgnoreMalformed()).example(1); + XContentBuilder mappings = mapping(b -> { + b.startObject("field"); + example.mapping().accept(b); + b.endObject(); + }); - @Override - public void testSyntheticSourceKeepArrays() throws IOException { - // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" - } + var settings = Settings.builder() + .put("index.mapping.source.mode", "synthetic") + .put("index.mapping.synthetic_source_keep", "arrays") + .build(); + DocumentMapper mapperAll = createMapperService(getVersion(), settings, () -> true, mappings).documentMapper(); - @Override - public void testSyntheticSourceKeepNone() throws IOException { - // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" + int elementCount = randomIntBetween(2, 5); + CheckedConsumer buildInput = (XContentBuilder builder) -> { + example.buildInputArray(builder, elementCount); + }; + + var builder = XContentFactory.jsonBuilder(); + 
builder.startObject(); + buildInput.accept(builder); + builder.endObject(); + String expected = Strings.toString(builder); + String actual = syntheticSource(mapperAll, buildInput); + assertThat(actual, equalTo(expected)); } @Override @@ -151,16 +167,21 @@ public SyntheticSourceExample example(int maxValues) throws IOException { return new SyntheticSourceExample(in, out, this::mapping); } + private final Set previousValues = new HashSet<>(); + private Tuple generateValue() { - String v = ESTestCase.randomAlphaOfLength(5); + String v; + if (previousValues.size() > 0 && randomBoolean()) { + v = randomFrom(previousValues); + } else { + v = ESTestCase.randomAlphaOfLength(5); + previousValues.add(v); + } return Tuple.tuple(v, v); } private void mapping(XContentBuilder b) throws IOException { minimalMapping(b); - // For now, synthetic source is only supported when "synthetic_source_keep" is "none". - // Once we implement true synthetic source support, we should remove this. - b.field("synthetic_source_keep", "none"); } @Override diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java index ef11c7dd3e9d9..11dcff0bfac7c 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.SourceToParse; @@ -40,7 
+41,9 @@ protected List getSearchPlugins() { } public void testAggregatesCountedKeywords() throws Exception { - FieldMapper mapper = new CountedKeywordFieldMapper.Builder("stacktraces").build(MapperBuilderContext.root(false, false)); + FieldMapper mapper = new CountedKeywordFieldMapper.Builder("stacktraces", Mapper.SourceKeepMode.NONE).build( + MapperBuilderContext.root(false, false) + ); MappedFieldType fieldType = mapper.fieldType(); CountedTermsAggregationBuilder aggregationBuilder = new CountedTermsAggregationBuilder("st").field("stacktraces"); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java index fc2ca0364e8a2..b915eb3cd3e28 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamIndexTransportAction.java @@ -57,7 +57,7 @@ public class ReindexDataStreamIndexTransportAction extends HandledTransportActio public static final Setting REINDEX_MAX_REQUESTS_PER_SECOND_SETTING = new Setting<>( REINDEX_MAX_REQUESTS_PER_SECOND_KEY, - Float.toString(10f), + Float.toString(1000f), s -> { if (s.equals("-1")) { return Float.POSITIVE_INFINITY; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 52ffe3893f33c..e043fb60e92f7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -30,6 +30,7 @@ import 
org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorUtils; import org.elasticsearch.xpack.ml.extractor.ExtractedField; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.io.IOException; import java.io.InputStream; @@ -203,11 +204,12 @@ private InputStream processAndConsumeSearchHits(SearchHits hits) throws IOExcept BytesStreamOutput outputStream = new BytesStreamOutput(); SearchHit lastHit = hits.getAt(hits.getHits().length - 1); - lastTimestamp = context.extractedFields.timeFieldValue(lastHit); + lastTimestamp = context.extractedFields.timeFieldValue(lastHit, new SourceSupplier(lastHit)); try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(context.extractedFields, outputStream)) { for (SearchHit hit : hits) { + SourceSupplier sourceSupplier = new SourceSupplier(hit); if (isCancelled) { - Long timestamp = context.extractedFields.timeFieldValue(hit); + Long timestamp = context.extractedFields.timeFieldValue(hit, sourceSupplier); if (timestamp != null) { if (timestampOnCancel == null) { timestampOnCancel = timestamp; @@ -218,7 +220,7 @@ private InputStream processAndConsumeSearchHits(SearchHits hits) throws IOExcept } } } - hitProcessor.process(hit); + hitProcessor.process(hit, sourceSupplier); } } return outputStream.bytes().streamInput(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java index c2353d71a71da..cc2c5028039e6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessor.java @@ -12,6 +12,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import 
org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.io.IOException; import java.io.OutputStream; @@ -27,10 +28,10 @@ class SearchHitToJsonProcessor implements Releasable { this.jsonBuilder = new XContentBuilder(JsonXContent.jsonXContent, outputStream); } - public void process(SearchHit hit) throws IOException { + public void process(SearchHit hit, SourceSupplier sourceSupplier) throws IOException { jsonBuilder.startObject(); for (ExtractedField field : fields.getAllFields()) { - writeKeyValue(field.getName(), field.value(hit)); + writeKeyValue(field.getName(), field.value(hit, sourceSupplier)); } jsonBuilder.endObject(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java index 7e78e5b9fd24a..74bde7b57bd90 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFields.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.util.ArrayList; import java.util.Arrays; @@ -40,8 +41,8 @@ public String timeField() { return timeField.getName(); } - public Long timeFieldValue(SearchHit hit) { - Object[] value = timeField.value(hit); + public Long timeFieldValue(SearchHit hit, SourceSupplier source) { + Object[] value = timeField.value(hit, source); if (value.length != 1) { throw new RuntimeException( "Time field [" + timeField.getName() + "] expected a single value; 
actual was: " + Arrays.toString(value) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index ff96c73bc002c..3a4cdbfee0623 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; import org.elasticsearch.xpack.ml.extractor.ProcessedField; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import java.io.IOException; import java.util.ArrayList; @@ -156,7 +157,7 @@ public void preview(ActionListener> listener) { List rows = new ArrayList<>(searchResponse.getHits().getHits().length); for (SearchHit hit : searchResponse.getHits().getHits()) { - String[] extractedValues = extractValues(hit); + String[] extractedValues = extractValues(hit, new SourceSupplier(hit)); rows.add(extractedValues); } delegate.onResponse(rows); @@ -255,9 +256,9 @@ private SearchHit[] processSearchResponse(SearchResponse searchResponse) { return searchResponse.getHits().asUnpooled().getHits(); } - private String extractNonProcessedValues(SearchHit hit, String organicFeature) { + private String extractNonProcessedValues(SearchHit hit, SourceSupplier sourceSupplier, String organicFeature) { ExtractedField field = extractedFieldsByName.get(organicFeature); - Object[] values = field.value(hit); + Object[] values = field.value(hit, sourceSupplier); if (values.length == 1 && isValidValue(values[0])) { return Objects.toString(values[0]); } @@ -270,8 +271,8 @@ private String extractNonProcessedValues(SearchHit hit, String organicFeature) { return null; } - private String[] extractProcessedValue(ProcessedField 
processedField, SearchHit hit) { - Object[] values = processedField.value(hit, extractedFieldsByName::get); + private String[] extractProcessedValue(ProcessedField processedField, SearchHit hit, SourceSupplier sourceSupplier) { + Object[] values = processedField.value(hit, sourceSupplier, extractedFieldsByName::get); if (values.length == 0 && context.supportsRowsWithMissingValues == false) { return null; } @@ -309,12 +310,13 @@ private String[] extractProcessedValue(ProcessedField processedField, SearchHit } public Row createRow(SearchHit hit) { - String[] extractedValues = extractValues(hit); + SourceSupplier sourceSupplier = new SourceSupplier(hit); + String[] extractedValues = extractValues(hit, sourceSupplier); if (extractedValues == null) { - return new Row(null, hit, true); + return new Row(null, hit, sourceSupplier, true); } boolean isTraining = trainTestSplitter.get().isTraining(extractedValues); - Row row = new Row(extractedValues, hit, isTraining); + Row row = new Row(extractedValues, hit, sourceSupplier, isTraining); LOGGER.trace( () -> format( "[%s] Extracted row: sort key = [%s], is_training = [%s], values = %s", @@ -327,18 +329,18 @@ public Row createRow(SearchHit hit) { return row; } - private String[] extractValues(SearchHit hit) { + private String[] extractValues(SearchHit hit, SourceSupplier sourceSupplier) { String[] extractedValues = new String[organicFeatures.length + processedFeatures.length]; int i = 0; for (String organicFeature : organicFeatures) { - String extractedValue = extractNonProcessedValues(hit, organicFeature); + String extractedValue = extractNonProcessedValues(hit, sourceSupplier, organicFeature); if (extractedValue == null) { return null; } extractedValues[i++] = extractedValue; } for (ProcessedField processedField : context.extractedFields.getProcessedFields()) { - String[] processedValues = extractProcessedValue(processedField, hit); + String[] processedValues = extractProcessedValue(processedField, hit, sourceSupplier); if 
(processedValues == null) { return null; } @@ -445,9 +447,12 @@ public static class Row { private final boolean isTraining; - private Row(String[] values, SearchHit hit, boolean isTraining) { + private final SourceSupplier sourceSupplier; + + private Row(String[] values, SearchHit hit, SourceSupplier sourceSupplier, boolean isTraining) { this.values = values; this.hit = hit; + this.sourceSupplier = sourceSupplier; this.isTraining = isTraining; } @@ -475,5 +480,9 @@ public int getChecksum() { public long getSortKey() { return (long) hit.getSortValues()[0]; } + + public Map getSource() { + return sourceSupplier.get(); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index 64cf493028ad1..1d80bfbc07293 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.ml.dataframe.stats.ProgressTracker; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.utils.MlIndicesUtils; @@ -210,8 +211,11 @@ private void inferTestDocs(LocalModel model, TestDocsIterator testDocsIterator, for (SearchHit doc : batch) { dataCountsTracker.incrementTestDocsCount(); - InferenceResults inferenceResults = model.inferNoStats(featuresFromDoc(doc)); - bulkIndexer.addAndExecuteIfNeeded(createIndexRequest(doc, inferenceResults, config.getDest().getResultsField())); + SourceSupplier sourceSupplier = new 
SourceSupplier(doc); + InferenceResults inferenceResults = model.inferNoStats(featuresFromDoc(doc, sourceSupplier)); + bulkIndexer.addAndExecuteIfNeeded( + createIndexRequest(doc, sourceSupplier, inferenceResults, config.getDest().getResultsField()) + ); processedDocCount++; int progressPercent = Math.min((int) (processedDocCount * 100.0 / totalDocCount), MAX_PROGRESS_BEFORE_COMPLETION); @@ -225,10 +229,10 @@ private void inferTestDocs(LocalModel model, TestDocsIterator testDocsIterator, } } - private Map featuresFromDoc(SearchHit doc) { + private Map featuresFromDoc(SearchHit doc, SourceSupplier sourceSupplier) { Map features = new HashMap<>(); for (ExtractedField extractedField : extractedFields.getAllFields()) { - Object[] values = extractedField.value(doc); + Object[] values = extractedField.value(doc, sourceSupplier); if (values.length == 1) { features.put(extractedField.getName(), values[0]); } @@ -236,11 +240,10 @@ private Map featuresFromDoc(SearchHit doc) { return features; } - private IndexRequest createIndexRequest(SearchHit hit, InferenceResults results, String resultField) { + private IndexRequest createIndexRequest(SearchHit hit, SourceSupplier sourceSupplier, InferenceResults results, String resultField) { Map resultsMap = new LinkedHashMap<>(results.asMap()); resultsMap.put(DestinationIndex.IS_TRAINING, false); - - Map source = new LinkedHashMap<>(hit.getSourceAsMap()); + Map source = new LinkedHashMap<>(sourceSupplier.get()); source.put(resultField, resultsMap); IndexRequest indexRequest = new IndexRequest(hit.getIndex()); indexRequest.id(hit.getId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java index 3e1968ca19ce1..08eb78e8274a4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java @@ -102,7 +102,7 @@ private void joinCurrentResults() { RowResults result = currentResults.pop(); DataFrameDataExtractor.Row row = dataFrameRowsIterator.next(); checkChecksumsMatch(row, result); - bulkIndexer.addAndExecuteIfNeeded(createIndexRequest(result, row.getHit())); + bulkIndexer.addAndExecuteIfNeeded(createIndexRequest(result, row)); } } @@ -130,11 +130,11 @@ private static void checkChecksumsMatch(DataFrameDataExtractor.Row row, RowResul } } - private IndexRequest createIndexRequest(RowResults result, SearchHit hit) { - Map source = new LinkedHashMap<>(hit.getSourceAsMap()); + private IndexRequest createIndexRequest(RowResults result, DataFrameDataExtractor.Row row) { + Map source = new LinkedHashMap<>(row.getSource()); source.putAll(result.getResults()); - IndexRequest indexRequest = new IndexRequest(hit.getIndex()); - indexRequest.id(hit.getId()); + IndexRequest indexRequest = new IndexRequest(row.getHit().getIndex()); + indexRequest.id(row.getHit().getId()); indexRequest.source(source); indexRequest.opType(DocWriteRequest.OpType.INDEX); indexRequest.setParentTask(parentTaskId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java index b2a6b887ce31d..0ec3cc6b577e5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/DocValueField.java @@ -23,7 +23,7 @@ public Method getMethod() { } @Override - public Object[] value(SearchHit hit) { + public Object[] value(SearchHit hit, SourceSupplier source) { return getFieldValue(hit); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java index 988263745e415..83a56f388a301 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedField.java @@ -48,10 +48,12 @@ enum Method { /** * Extracts the value from a {@link SearchHit} - * @param hit the search hit + * + * @param hit the search hit + * @param source the source supplier * @return the extracted value */ - Object[] value(SearchHit hit); + Object[] value(SearchHit hit, SourceSupplier source); /** * @return Whether the field can be fetched from source instead diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java index ffe30516f6766..69602c2c37ce5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java @@ -263,8 +263,8 @@ public Method getMethod() { } @Override - public Object[] value(SearchHit hit) { - Object[] value = field.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + Object[] value = field.value(hit, source); if (value != null) { return Arrays.stream(value).map(v -> { boolean asBoolean; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java index 9edc72ca38f73..be63ce30f5f9a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoPointField.java @@ -23,8 +23,8 @@ public GeoPointField(String name) { } @Override - public Object[] value(SearchHit hit) { - Object[] value = 
super.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + Object[] value = super.value(hit, source); if (value.length == 0) { return value; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java index d7b8827add05a..c4ee723b4ae81 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/GeoShapeField.java @@ -33,8 +33,8 @@ public GeoShapeField(String name) { } @Override - public Object[] value(SearchHit hit) { - Object[] value = super.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + Object[] value = super.value(hit, source); if (value.length == 0) { return value; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java index 8bbfc714e35dd..b86da90736c6c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/MultiField.java @@ -50,8 +50,8 @@ public Method getMethod() { } @Override - public Object[] value(SearchHit hit) { - return field.value(hit); + public Object[] value(SearchHit hit, SourceSupplier source) { + return field.value(hit, source); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java index 62ee8a3ffd20a..ebf43e15391d0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ProcessedField.java @@ -38,7 +38,7 @@ public Set getOutputFieldType(String 
outputField) { return Collections.singleton(preProcessor.getOutputFieldType(outputField)); } - public Object[] value(SearchHit hit, Function fieldExtractor) { + public Object[] value(SearchHit hit, SourceSupplier sourceSupplier, Function fieldExtractor) { List inputFields = getInputFieldNames(); Map inputs = Maps.newMapWithExpectedSize(inputFields.size()); for (String field : inputFields) { @@ -46,7 +46,7 @@ public Object[] value(SearchHit hit, Function fieldExtra if (extractedField == null) { return new Object[0]; } - Object[] values = extractedField.value(hit); + Object[] values = extractedField.value(hit, sourceSupplier); if (values == null || values.length == 0) { continue; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java index add0bdc2fb1ed..d064a75d73a9e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ScriptField.java @@ -22,7 +22,7 @@ public Method getMethod() { } @Override - public Object[] value(SearchHit hit) { + public Object[] value(SearchHit hit, SourceSupplier source) { return getFieldValue(hit); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java index dc29533881cba..57abe104af2c4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceField.java @@ -27,15 +27,16 @@ public Method getMethod() { } @Override - public Object[] value(SearchHit hit) { - Map source = hit.getSourceAsMap(); + public Object[] value(SearchHit hit, SourceSupplier source) { + // This is the only one that might be problematic + Map sourceMap = source.get(); int level = 0; 
- while (source != null && level < path.length - 1) { - source = getNextLevel(source, path[level]); + while (sourceMap != null && level < path.length - 1) { + sourceMap = getNextLevel(sourceMap, path[level]); level++; } - if (source != null) { - Object values = source.get(path[level]); + if (sourceMap != null) { + Object values = sourceMap.get(path[level]); if (values != null) { if (values instanceof List) { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java new file mode 100644 index 0000000000000..192eaf1c6a652 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/SourceSupplier.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.extractor; + +import org.elasticsearch.search.SearchHit; + +import java.util.Map; +import java.util.function.Supplier; + +/** + * A supplier for the source of a search hit with caching capabilities. 
+ */ +public final class SourceSupplier implements Supplier> { + + private final SearchHit searchHit; + private Map sourceMap; + + public SourceSupplier(SearchHit searchHit) { + this.searchHit = searchHit; + } + + @Override + public Map get() { + if (sourceMap == null) { + sourceMap = searchHit.getSourceAsMap(); + } + return sourceMap; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java index 7bea64e5a9a4e..a1b40f1600a0c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/TimeField.java @@ -38,7 +38,7 @@ public Method getMethod() { } @Override - public Object[] value(SearchHit hit) { + public Object[] value(SearchHit hit, SourceSupplier source) { Object[] value = getFieldValue(hit); if (value.length != 1) { return value; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java index ecb49ef9fa713..a19634e227616 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/SearchHitToJsonProcessorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.ml.extractor.DocValueField; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.extractor.TimeField; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; @@ -75,7 +76,7 @@ private String searchHitToString(ExtractedFields fields, SearchHit... 
searchHits ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(fields, outputStream)) { for (int i = 0; i < searchHits.length; i++) { - hitProcessor.process(searchHits[i]); + hitProcessor.process(searchHits[i], new SourceSupplier(searchHits[i])); } } return outputStream.toString(StandardCharsets.UTF_8.name()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java index a2264a4cf7a66..707fb1d5be202 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/TimeBasedExtractedFieldsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.ml.extractor.ExtractedFields; import org.elasticsearch.xpack.ml.extractor.ScriptField; import org.elasticsearch.xpack.ml.extractor.SourceField; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.extractor.TimeField; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; @@ -79,7 +80,7 @@ public void testStringTimeFieldValue() { long millis = randomLong(); SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField)); - assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); + assertThat(extractedFields.timeFieldValue(hit, new SourceSupplier(hit)), equalTo(millis)); } public void testPre6xTimeFieldValue() { @@ -87,7 +88,7 @@ public void testPre6xTimeFieldValue() { long millis = randomLong(); SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); 
TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField)); - assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); + assertThat(extractedFields.timeFieldValue(hit, new SourceSupplier(hit)), equalTo(millis)); } public void testTimeFieldValueGivenEmptyArray() { @@ -95,7 +96,7 @@ public void testTimeFieldValueGivenEmptyArray() { TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField)); - expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); + expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit, new SourceSupplier(hit))); } public void testTimeFieldValueGivenValueHasTwoElements() { @@ -103,7 +104,7 @@ public void testTimeFieldValueGivenValueHasTwoElements() { TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField)); - expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); + expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit, new SourceSupplier(hit))); } public void testTimeFieldValueGivenValueIsString() { @@ -111,7 +112,7 @@ public void testTimeFieldValueGivenValueIsString() { TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Arrays.asList(timeField)); - expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit)); + expectThrows(RuntimeException.class, () -> extractedFields.timeFieldValue(hit, new SourceSupplier(hit))); } public void testBuildGivenMixtureOfTypes() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 2ba9146533b78..1ca74d4a43a87 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -592,7 +592,7 @@ public void testExtractionWithMultipleScalarTypesInSource() throws IOException { public void testExtractionWithProcessedFieldThrows() { ProcessedField processedField = mock(ProcessedField.class); - doThrow(new RuntimeException("process field error")).when(processedField).value(any(), any()); + doThrow(new RuntimeException("process field error")).when(processedField).value(any(), any(), any()); extractedFields = new ExtractedFields( Arrays.asList( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java index fdece811e7029..ee7bdcb51d60d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.ExtractedFields; +import org.elasticsearch.xpack.ml.extractor.SourceSupplier; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; import java.util.ArrayList; @@ -814,13 +815,14 @@ private void testDetect_GivenBooleanField(DataFrameAnalyticsConfig config, boole ); SearchHit hit = new SearchHitBuilder(42).addField("some_boolean", true).build(); - assertThat(booleanField.value(hit), arrayContaining(1)); + SourceSupplier sourceSupplier = new SourceSupplier(hit); + assertThat(booleanField.value(hit, sourceSupplier), arrayContaining(1)); hit = new 
SearchHitBuilder(42).addField("some_boolean", false).build(); - assertThat(booleanField.value(hit), arrayContaining(0)); + assertThat(booleanField.value(hit, sourceSupplier), arrayContaining(0)); hit = new SearchHitBuilder(42).addField("some_boolean", Arrays.asList(false, true, false)).build(); - assertThat(booleanField.value(hit), arrayContaining(0, 1, 0)); + assertThat(booleanField.value(hit, sourceSupplier), arrayContaining(0, 1, 0)); } public void testDetect_GivenBooleanField_OutlierDetection() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index cb02b8294b115..a87f3f88190c0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -326,6 +326,7 @@ private static DataFrameDataExtractor.Row newTestRow(SearchHit hit, String[] val private static DataFrameDataExtractor.Row newRow(SearchHit hit, String[] values, boolean isTraining, int checksum) { DataFrameDataExtractor.Row row = mock(DataFrameDataExtractor.Row.class); when(row.getHit()).thenReturn(hit); + when(row.getSource()).thenReturn(hit.getSourceAsMap()); when(row.getValues()).thenReturn(values); when(row.isTraining()).thenReturn(isTraining); when(row.getChecksum()).thenReturn(checksum); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java index 9fdae0c517eff..5239a71045b48 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/DocValueFieldTests.java @@ -25,7 +25,7 @@ public void testKeyword() 
{ ExtractedField field = new DocValueField("a_keyword", Collections.singleton("keyword")); - assertThat(field.value(hit), equalTo(new String[] { "bar" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); assertThat(field.getName(), equalTo("a_keyword")); assertThat(field.getSearchField(), equalTo("a_keyword")); assertThat(field.getTypes(), contains("keyword")); @@ -41,7 +41,7 @@ public void testKeywordArray() { ExtractedField field = new DocValueField("array", Collections.singleton("keyword")); - assertThat(field.value(hit), equalTo(new String[] { "a", "b" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "a", "b" })); assertThat(field.getName(), equalTo("array")); assertThat(field.getSearchField(), equalTo("array")); assertThat(field.getTypes(), contains("keyword")); @@ -52,7 +52,7 @@ public void testKeywordArray() { expectThrows(UnsupportedOperationException.class, () -> field.getParentField()); ExtractedField missing = new DocValueField("missing", Collections.singleton("keyword")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testMissing() { @@ -60,7 +60,7 @@ public void testMissing() { ExtractedField missing = new DocValueField("missing", Collections.singleton("keyword")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testNewFromSource() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java index 50da104bd2e50..23420e518309b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ExtractedFieldsTests.java @@ -127,8 +127,8 @@ public void testApplyBooleanMapping_GivenDocValueField() { SearchHit hitTrue = new SearchHitBuilder(42).addField("a_bool", true).build(); SearchHit hitFalse = new SearchHitBuilder(42).addField("a_bool", false).build(); - assertThat(mapped.value(hitTrue), equalTo(new Integer[] { 1 })); - assertThat(mapped.value(hitFalse), equalTo(new Integer[] { 0 })); + assertThat(mapped.value(hitTrue, new SourceSupplier(hitTrue)), equalTo(new Integer[] { 1 })); + assertThat(mapped.value(hitFalse, new SourceSupplier(hitFalse)), equalTo(new Integer[] { 0 })); assertThat(mapped.getName(), equalTo(aBool.getName())); assertThat(mapped.getMethod(), equalTo(aBool.getMethod())); @@ -145,10 +145,10 @@ public void testApplyBooleanMapping_GivenSourceField() { SearchHit hitTrueArray = new SearchHitBuilder(42).setSource("{\"a_bool\": [\"true\", true]}").build(); SearchHit hitFalseArray = new SearchHitBuilder(42).setSource("{\"a_bool\": [\"false\", false]}").build(); - assertThat(mapped.value(hitTrue), equalTo(new Integer[] { 1 })); - assertThat(mapped.value(hitFalse), equalTo(new Integer[] { 0 })); - assertThat(mapped.value(hitTrueArray), equalTo(new Integer[] { 1, 1 })); - assertThat(mapped.value(hitFalseArray), equalTo(new Integer[] { 0, 0 })); + assertThat(mapped.value(hitTrue, new SourceSupplier(hitTrue)), equalTo(new Integer[] { 1 })); + assertThat(mapped.value(hitFalse, new SourceSupplier(hitFalse)), equalTo(new Integer[] { 0 })); + assertThat(mapped.value(hitTrueArray, new SourceSupplier(hitTrueArray)), equalTo(new Integer[] { 1, 1 })); + assertThat(mapped.value(hitFalseArray, new SourceSupplier(hitFalseArray)), equalTo(new Integer[] { 0, 0 })); assertThat(mapped.getName(), equalTo(aBool.getName())); assertThat(mapped.getMethod(), equalTo(aBool.getMethod())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java index 4b48b44bcd9d4..ad1aaebad8bd3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoPointFieldTests.java @@ -28,7 +28,7 @@ public void testGivenGeoPoint() { // doc_value field ExtractedField geo = new GeoPointField("geo"); - assertThat(geo.value(hit), equalTo(expected)); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected)); assertThat(geo.getName(), equalTo("geo")); assertThat(geo.getSearchField(), equalTo("geo")); assertThat(geo.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE)); @@ -45,7 +45,7 @@ public void testMissing() { ExtractedField geo = new GeoPointField("missing"); - assertThat(geo.value(hit), equalTo(new Object[0])); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testArray() { @@ -53,7 +53,7 @@ public void testArray() { ExtractedField geo = new GeoPointField("geo"); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit, new SourceSupplier(hit))); assertThat(e.getMessage(), equalTo("Unexpected values for a geo_point field: [1, 2]")); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java index efedf918d479f..5b84d61a6997c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java @@ -27,7 +27,7 @@ public void testObjectFormat() { ExtractedField geo = new GeoShapeField("geo"); - assertThat(geo.value(hit), equalTo(expected)); + 
assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected)); assertThat(geo.getName(), equalTo("geo")); assertThat(geo.getSearchField(), equalTo("geo")); assertThat(geo.getTypes(), contains("geo_shape")); @@ -48,7 +48,7 @@ public void testWKTFormat() { ExtractedField geo = new GeoShapeField("geo"); - assertThat(geo.value(hit), equalTo(expected)); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected)); assertThat(geo.getName(), equalTo("geo")); assertThat(geo.getSearchField(), equalTo("geo")); assertThat(geo.getTypes(), contains("geo_shape")); @@ -65,7 +65,7 @@ public void testMissing() { ExtractedField geo = new GeoShapeField("missing"); - assertThat(geo.value(hit), equalTo(new Object[0])); + assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testArray() { @@ -73,7 +73,7 @@ public void testArray() { ExtractedField geo = new GeoShapeField("geo"); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit, new SourceSupplier(hit))); assertThat(e.getMessage(), equalTo("Unexpected values for a geo_shape field: [1, 2]")); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java index 8c7fb82238713..daa190c03321a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/MultiFieldTests.java @@ -23,7 +23,7 @@ public void testGivenSameSearchField() { ExtractedField wrapped = new DocValueField("a.b", Collections.singleton("integer")); ExtractedField field = new MultiField("a", wrapped); - assertThat(field.value(hit), equalTo(new Integer[] { 2 })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new Integer[] { 2 
})); assertThat(field.getName(), equalTo("a.b")); assertThat(field.getSearchField(), equalTo("a.b")); assertThat(field.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE)); @@ -39,7 +39,7 @@ public void testGivenDifferentSearchField() { ExtractedField wrapped = new DocValueField("a", Collections.singleton("integer")); ExtractedField field = new MultiField("a.b", "a", "a", wrapped); - assertThat(field.value(hit), equalTo(new Integer[] { 1 })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new Integer[] { 1 })); assertThat(field.getName(), equalTo("a.b")); assertThat(field.getSearchField(), equalTo("a")); assertThat(field.getMethod(), equalTo(ExtractedField.Method.DOC_VALUE)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java index 907908ef8f710..489a2aa01dfce 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ProcessedFieldTests.java @@ -44,13 +44,13 @@ public void testOneHotGetters() { public void testMissingExtractor() { ProcessedField processedField = new ProcessedField(makeOneHotPreProcessor(randomAlphaOfLength(10), "bar", "baz")); - assertThat(processedField.value(makeHit(), (s) -> null), emptyArray()); + assertThat(processedField.value(makeHit(), null, (s) -> null), emptyArray()); } public void testMissingInputValues() { ExtractedField extractedField = makeExtractedField(new Object[0]); ProcessedField processedField = new ProcessedField(makeOneHotPreProcessor(randomAlphaOfLength(10), "bar", "baz")); - assertThat(processedField.value(makeHit(), (s) -> extractedField), arrayContaining(is(nullValue()), is(nullValue()))); + assertThat(processedField.value(makeHit(), null, (s) -> extractedField), arrayContaining(is(nullValue()), is(nullValue()))); } public void 
testProcessedFieldFrequencyEncoding() { @@ -101,7 +101,7 @@ public void testProcessedField(PreProcessor preProcessor, Object[] inputs, Objec assert inputs.length == expectedOutputs.length; for (int i = 0; i < inputs.length; i++) { Object input = inputs[i]; - Object[] result = processedField.value(makeHit(input), (s) -> makeExtractedField(new Object[] { input })); + Object[] result = processedField.value(makeHit(input), null, (s) -> makeExtractedField(new Object[] { input })); assertThat( "Input [" + input + "] Expected " + Arrays.toString(expectedOutputs[i]) + " but received " + Arrays.toString(result), result, @@ -120,7 +120,7 @@ private static PreProcessor makeOneHotPreProcessor(String inputField, String... private static ExtractedField makeExtractedField(Object[] value) { ExtractedField extractedField = mock(ExtractedField.class); - when(extractedField.value(any())).thenReturn(value); + when(extractedField.value(any(), any())).thenReturn(value); return extractedField; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java index 1b60d878c5732..88aa9c2fe5f72 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/ScriptFieldTests.java @@ -23,7 +23,7 @@ public void testKeyword() { ExtractedField field = new ScriptField("a_keyword"); - assertThat(field.value(hit), equalTo(new String[] { "bar" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); assertThat(field.getName(), equalTo("a_keyword")); assertThat(field.getSearchField(), equalTo("a_keyword")); assertThat(field.getTypes().isEmpty(), is(true)); @@ -40,7 +40,7 @@ public void testKeywordArray() { ExtractedField field = new ScriptField("array"); - assertThat(field.value(hit), equalTo(new String[] { "a", "b" })); + 
assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "a", "b" })); assertThat(field.getName(), equalTo("array")); assertThat(field.getSearchField(), equalTo("array")); assertThat(field.getTypes().isEmpty(), is(true)); @@ -52,7 +52,7 @@ public void testKeywordArray() { expectThrows(UnsupportedOperationException.class, () -> field.newFromSource()); ExtractedField missing = new DocValueField("missing", Collections.singleton("keyword")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testMissing() { @@ -60,6 +60,6 @@ public void testMissing() { ExtractedField missing = new ScriptField("missing"); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java index 891033e76dfa1..22fbc779ddb06 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/SourceFieldTests.java @@ -24,7 +24,7 @@ public void testSingleValue() { ExtractedField field = new SourceField("single", Collections.singleton("text")); - assertThat(field.value(hit), equalTo(new String[] { "bar" })); + assertThat(field.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); assertThat(field.getName(), equalTo("single")); assertThat(field.getSearchField(), equalTo("single")); assertThat(field.getTypes(), contains("text")); @@ -42,7 +42,7 @@ public void testArray() { ExtractedField field = new SourceField("array", Collections.singleton("text")); - assertThat(field.value(hit), equalTo(new String[] { "a", "b" })); + assertThat(field.value(hit, new SourceSupplier(hit)), 
equalTo(new String[] { "a", "b" })); assertThat(field.getName(), equalTo("array")); assertThat(field.getSearchField(), equalTo("array")); assertThat(field.getTypes(), contains("text")); @@ -60,7 +60,7 @@ public void testMissing() { ExtractedField missing = new SourceField("missing", Collections.singleton("text")); - assertThat(missing.value(hit), equalTo(new Object[0])); + assertThat(missing.value(hit, new SourceSupplier(hit)), equalTo(new Object[0])); } public void testValueGivenNested() { @@ -69,7 +69,7 @@ public void testValueGivenNested() { ExtractedField nested = new SourceField("level_1.level_2.foo", Collections.singleton("text")); - assertThat(nested.value(hit), equalTo(new String[] { "bar" })); + assertThat(nested.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); } public void testValueGivenNestedArray() { @@ -78,6 +78,6 @@ public void testValueGivenNestedArray() { ExtractedField nested = new SourceField("level_1.level_2.foo", Collections.singleton("text")); - assertThat(nested.value(hit), equalTo(new String[] { "bar" })); + assertThat(nested.value(hit, new SourceSupplier(hit)), equalTo(new String[] { "bar" })); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java index 79cf90498cd8a..987756b7e73c9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/TimeFieldTests.java @@ -29,7 +29,7 @@ public void testDocValueWithWholeMillisecondStringValue() { ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE); - assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis })); assertThat(timeField.getName(), equalTo("time")); assertThat(timeField.getSearchField(), 
equalTo("time")); assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos")); @@ -51,7 +51,7 @@ public void testDocValueWithFractionalMillisecondStringValue() { ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE); - assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis })); assertThat(timeField.getName(), equalTo("time")); assertThat(timeField.getSearchField(), equalTo("time")); assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos")); @@ -69,7 +69,7 @@ public void testScriptWithLongValue() { ExtractedField timeField = new TimeField("time", ExtractedField.Method.SCRIPT_FIELD); - assertThat(timeField.value(hit), equalTo(new Object[] { millis })); + assertThat(timeField.value(hit, new SourceSupplier(hit)), equalTo(new Object[] { millis })); assertThat(timeField.getName(), equalTo("time")); assertThat(timeField.getSearchField(), equalTo("time")); assertThat(timeField.getTypes(), containsInAnyOrder("date", "date_nanos")); @@ -87,7 +87,7 @@ public void testUnknownFormat() { final ExtractedField timeField = new TimeField("time", ExtractedField.Method.DOC_VALUE); assertThat( - expectThrows(IllegalStateException.class, () -> timeField.value(hit)).getMessage(), + expectThrows(IllegalStateException.class, () -> timeField.value(hit, new SourceSupplier(hit))).getMessage(), startsWith("Unexpected value for a time field") ); } diff --git a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index 7ddaa53a59914..a0679c90e0f9b 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ 
b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -166,19 +166,18 @@ public void testMonitoringBulk() throws Exception { final SearchHits hits = response.getHits(); assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat( - "Monitoring documents must have the same timestamp", - Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), - equalTo(1L) - ); - assertThat( - "Monitoring documents must have the same source_node timestamp", - Arrays.stream(hits.getHits()) - .map(hit -> extractValue("source_node.timestamp", hit.getSourceAsMap())) - .distinct() - .count(), - equalTo(1L) - ); + Map<String, Object> sourceHit = hits.getHits()[0].getSourceAsMap(); + Object ts = extractValue("timestamp", sourceHit); + Object sourceNodeTimestamp = extractValue("source_node.timestamp", sourceHit); + for (int i = 1; i < hits.getHits().length; i++) { + sourceHit = hits.getHits()[i].getSourceAsMap(); + assertThat("Monitoring documents must have the same timestamp", extractValue("timestamp", sourceHit), equalTo(ts)); + assertThat( + "Monitoring documents must have the same source_node timestamp", + extractValue("source_node.timestamp", sourceHit), + equalTo(sourceNodeTimestamp) + ); + } for (final SearchHit hit : hits.getHits()) { assertMonitoringDoc(toMap(hit), system, interval); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index f8ac5f9032fee..d080355495a84 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -31,6 +31,7 @@ import
java.util.Arrays; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; @@ -267,15 +268,16 @@ private void assertWatchesExist() { Set watchIds = new HashSet<>(Arrays.asList(ClusterAlertsUtil.WATCH_IDS)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { for (SearchHit hit : response.getHits().getHits()) { - String watchId = ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap()); + Map source = hit.getSourceAsMap(); + String watchId = ObjectPath.eval("metadata.xpack.watch", source); assertNotNull("Missing watch ID", watchId); assertTrue("found unexpected watch id", watchIds.contains(watchId)); - String version = ObjectPath.eval("metadata.xpack.version_created", hit.getSourceAsMap()); + String version = ObjectPath.eval("metadata.xpack.version_created", source); assertNotNull("Missing version from returned watch [" + watchId + "]", version); assertTrue(Version.fromId(Integer.parseInt(version)).onOrAfter(Version.fromId(ClusterAlertsUtil.LAST_UPDATED_VERSION))); - String uuid = ObjectPath.eval("metadata.xpack.cluster_uuid", hit.getSourceAsMap()); + String uuid = ObjectPath.eval("metadata.xpack.cluster_uuid", source); assertNotNull("Missing cluster uuid", uuid); assertEquals(clusterUUID, uuid); } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankCoordinatorContext.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankCoordinatorContext.java index 56054955d25e7..cc715c3568cfd 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankCoordinatorContext.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankCoordinatorContext.java @@ -119,9 +119,10 @@ protected boolean lessThan(RRFRankDoc a, RRFRankDoc b) { } value.score += 1.0f / (rankConstant + frank); + assert 
value.positions != null && value.scores != null; value.positions[fqi] = frank - 1; + assert rrfRankDoc.scores != null; value.scores[fqi] = rrfRankDoc.scores[fqi]; - return value; }); } @@ -139,6 +140,8 @@ protected boolean lessThan(RRFRankDoc a, RRFRankDoc b) { if (rrf1.score != rrf2.score) { return rrf1.score < rrf2.score ? 1 : -1; } + assert rrf1.positions != null && rrf1.scores != null; + assert rrf2.positions != null && rrf2.scores != null; assert rrf1.positions.length == rrf2.positions.length; for (int qi = 0; qi < rrf1.positions.length; ++qi) { if (rrf1.positions[qi] != NO_RANK && rrf2.positions[qi] != NO_RANK) { diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankShardContext.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankShardContext.java index 62e261d752d3e..9ef16482f64f2 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankShardContext.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFQueryPhaseRankShardContext.java @@ -51,16 +51,13 @@ public RRFRankShardResult combineQueryPhaseResults(List rankResults) { value = new RRFRankDoc(scoreDoc.doc, scoreDoc.shardIndex, queries, rankConstant); } - // calculate the current rrf score for this document - // later used to sort and covert to a rank + // calculate the current rrf score for this document, later used to sort and convert to a rank value.score += 1.0f / (rankConstant + frank); - // record the position for each query - // for explain and debugging + // record the position for each query, for explain and debugging + assert value.positions != null && value.scores != null; value.positions[findex] = frank - 1; - - // record the score for each query - // used to later re-rank on the coordinator + // record the score for each query, used to later re-rank on the coordinator value.scores[findex] = scoreDoc.score; return value; @@ -76,6 +73,9
@@ public RRFRankShardResult combineQueryPhaseResults(List rankResults) { if (rrf1.score != rrf2.score) { return rrf1.score < rrf2.score ? 1 : -1; } + + assert rrf1.positions != null && rrf1.scores != null; + assert rrf2.positions != null && rrf2.scores != null; assert rrf1.positions.length == rrf2.positions.length; for (int qi = 0; qi < rrf1.positions.length; ++qi) { if (rrf1.positions[qi] != NO_RANK && rrf2.positions[qi] != NO_RANK) { diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java index 84961f8442163..b3ea263b1d705 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.rank.rrf; import org.apache.lucene.search.Explanation; -import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -19,6 +18,7 @@ import java.util.Arrays; import java.util.Objects; +import static org.elasticsearch.TransportVersions.RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN; import static org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilder.DEFAULT_RANK_CONSTANT; /** @@ -47,7 +47,7 @@ public final class RRFRankDoc extends RankDoc { */ public final float[] scores; - public final int rankConstant; + public final Integer rankConstant; public RRFRankDoc(int doc, int shardIndex, int queryCount, int rankConstant) { super(doc, 0f, shardIndex); @@ -57,20 +57,38 @@ public RRFRankDoc(int doc, int shardIndex, int queryCount, int rankConstant) { this.rankConstant = rankConstant; } + public RRFRankDoc(int doc, int shardIndex) { + super(doc, 0f, shardIndex); + positions = null; + scores = null; + rankConstant = null; + } + public RRFRankDoc(StreamInput in) 
throws IOException { super(in); rank = in.readVInt(); - positions = in.readIntArray(); - scores = in.readFloatArray(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { - this.rankConstant = in.readVInt(); + if (in.getTransportVersion().onOrAfter(RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN)) { + if (in.readBoolean()) { + positions = in.readIntArray(); + } else { + positions = null; + } + scores = in.readOptionalFloatArray(); + rankConstant = in.readOptionalVInt(); } else { - this.rankConstant = DEFAULT_RANK_CONSTANT; + positions = in.readIntArray(); + scores = in.readFloatArray(); + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { + this.rankConstant = in.readVInt(); + } else { + this.rankConstant = DEFAULT_RANK_CONSTANT; + } } } @Override public Explanation explain(Explanation[] sources, String[] queryNames) { + assert positions != null && scores != null && rankConstant != null; assert sources.length == scores.length; int queries = positions.length; Explanation[] details = new Explanation[queries]; @@ -117,10 +135,21 @@ public Explanation explain(Explanation[] sources, String[] queryNames) { @Override public void doWriteTo(StreamOutput out) throws IOException { out.writeVInt(rank); - out.writeIntArray(positions); - out.writeFloatArray(scores); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { - out.writeVInt(rankConstant); + if (out.getTransportVersion().onOrAfter(RANK_DOC_OPTIONAL_METADATA_FOR_EXPLAIN)) { + if (positions != null) { + out.writeBoolean(true); + out.writeIntArray(positions); + } else { + out.writeBoolean(false); + } + out.writeOptionalFloatArray(scores); + out.writeOptionalVInt(rankConstant); + } else { + out.writeIntArray(positions == null ? new int[0] : positions); + out.writeFloatArray(scores == null ? new float[0] : scores); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { + out.writeVInt(rankConstant == null ? 
DEFAULT_RANK_CONSTANT : rankConstant); + } } } @@ -166,13 +195,14 @@ public String getWriteableName() { @Override protected void doToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("positions", positions); - builder.field("scores", scores); - builder.field("rankConstant", rankConstant); - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_16_0; + if (positions != null) { + builder.array("positions", positions); + } + if (scores != null) { + builder.array("scores", scores); + } + if (rankConstant != null) { + builder.field("rankConstant", rankConstant); + } } } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index a749a7c402c30..93445a9ce5ac9 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -105,7 +105,7 @@ protected RRFRetrieverBuilder clone(List newRetrievers, List rankResults) { + protected RRFRankDoc[] combineInnerRetrieverResults(List rankResults, boolean explain) { // combine the disjointed sets of TopDocs into a single set or RRFRankDocs // each RRFRankDoc will have both the position and score for each query where // it was within the result set for that query @@ -121,20 +121,26 @@ protected RRFRankDoc[] combineInnerRetrieverResults(List rankResults final int frank = rank; docsToRankResults.compute(new RankDoc.RankKey(scoreDoc.doc, scoreDoc.shardIndex), (key, value) -> { if (value == null) { - value = new RRFRankDoc(scoreDoc.doc, scoreDoc.shardIndex, queries, rankConstant); + if (explain) { + value = new RRFRankDoc(scoreDoc.doc, scoreDoc.shardIndex, queries, rankConstant); + } else { + value = new RRFRankDoc(scoreDoc.doc, scoreDoc.shardIndex); + } } // 
calculate the current rrf score for this document // later used to sort and covert to a rank value.score += 1.0f / (rankConstant + frank); - // record the position for each query - // for explain and debugging - value.positions[findex] = frank - 1; + if (explain && value.positions != null && value.scores != null) { + // record the position for each query + // for explain and debugging + value.positions[findex] = frank - 1; - // record the score for each query - // used to later re-rank on the coordinator - value.scores[findex] = scoreDoc.score; + // record the score for each query + // used to later re-rank on the coordinator + value.scores[findex] = scoreDoc.score; + } return value; }); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java index 8d8629db96fc6..23f33b2351c21 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java @@ -78,6 +78,7 @@ record ExpectedCluster(String clusterAlias, String indexExpression, String statu void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { Map clusters = (Map) responseMap.get("_clusters"); assertThat((int) responseMap.get("took"), greaterThan(0)); + assertThat((boolean) responseMap.get("is_partial"), is(false)); Map detailsMap = (Map) clusters.get("details"); assertThat(detailsMap.size(), is(expected.size())); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java 
b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java index b6fc43e2a6e48..c7623779ee214 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1UnavailableRemotesIT.java @@ -94,6 +94,7 @@ private void clusterShutDownWithRandomSkipUnavailable() throws Exception { assertThat((int) map.get("took"), greaterThan(0)); assertThat(columns.size(), is(4)); assertThat(values.size(), is(9)); + assertThat((boolean) map.get("is_partial"), is(false)); assertThat((int) clusters.get("total"), is(2)); assertThat((int) clusters.get("successful"), is(2)); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java index 52cd0655fbfdf..b62d82c47f753 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java @@ -112,6 +112,7 @@ private void clusterShutDownWithRandomSkipUnavailable() throws Exception { assertThat((int) map.get("took"), greaterThan(0)); assertThat(columns.size(), is(4)); assertThat(values.size(), is(9)); + assertThat((boolean) map.get("is_partial"), is(false)); assertThat((int) clusters.get("total"), is(2)); assertThat((int) clusters.get("successful"), is(2)); diff --git 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 41f2eab6a00e8..42d03838ed8d6 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -330,6 +331,19 @@ public void populateData() throws Exception { assertOK(adminClient().performRequest(putUserRequest)); } + private static String populateOtherUser() throws IOException { + String otherUser = REMOTE_SEARCH_USER + "_other"; + + final var putUserRequest = new Request("PUT", "/_security/user/" + otherUser); + putUserRequest.setJsonEntity(""" + { + "password": "x-pack-test-password", + "roles" : ["remote_search"] + }"""); + assertOK(adminClient().performRequest(putUserRequest)); + return otherUser; + } + @After public void wipeData() throws Exception { CheckedConsumer wipe = client -> { @@ -1198,7 +1212,116 @@ public void testSearchesAgainstNonMatchingIndices() throws Exception { } } + public void testCrossClusterAsyncQuery() throws Exception { + assumeTrue("delay() is only available in snapshot builds", Build.current().isSnapshot()); + configureRemoteCluster(); + populateData(); + String otherUser = populateOtherUser(); + + // Adding a delay there so that the async query is not completed before we check the status + Request request = 
esqlRequestAsync(""" + FROM employees, *:employees + | SORT emp_id ASC + | LIMIT 10 + | WHERE delay(10ms) + | KEEP emp_id, department"""); + Response response = performRequestWithRemoteSearchUser(request); + assertOK(response); + Map responseAsMap = entityAsMap(response); + assumeTrue("Query finished too fast, can not test", (boolean) responseAsMap.get("is_running")); + + String asyncId = (String) responseAsMap.get("id"); + response = performRequestWithRemoteSearchUser(esqlAsyncGetRequest(asyncId)); + assertOK(response); + responseAsMap = entityAsMap(response); + assertThat(responseAsMap.get("is_running"), equalTo(true)); + + // Other user can't see the async query + ResponseException error = expectThrows( + ResponseException.class, + () -> performRequestWithUser(esqlAsyncGetRequest(asyncId), otherUser) + ); + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + assertThat(error.getMessage(), containsString("resource_not_found_exception")); + + // Clean up + response = performRequestWithRemoteSearchUser(esqlAsyncDeleteRequest(asyncId)); + assertOK(response); + } + + public void testCrossClusterAsyncQueryStop() throws Exception { + assumeTrue("delay() is only available in snapshot builds", Build.current().isSnapshot()); + configureRemoteCluster(); + populateData(); + String otherUser = populateOtherUser(); + + // query remote cluster only + Request request = esqlRequestAsync(""" + FROM employees, *:employees + | SORT emp_id ASC + | LIMIT 10 + | WHERE delay(10ms) + | KEEP emp_id, department"""); + Response response = performRequestWithRemoteSearchUser(request); + assertOK(response); + Map responseAsMap = entityAsMap(response); + assertThat(responseAsMap.get("is_running"), equalTo(true)); + String asyncId = (String) responseAsMap.get("id"); + + response = performRequestWithRemoteSearchUser(esqlAsyncGetRequest(asyncId)); + assertOK(response); + responseAsMap = entityAsMap(response); + assertThat(responseAsMap.get("is_running"), 
equalTo(true)); + + // Other user can't see the async query + ResponseException error = expectThrows( + ResponseException.class, + () -> performRequestWithUser(esqlAsyncStopRequest(asyncId), otherUser) + ); + assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + assertThat(error.getMessage(), containsString("resource_not_found_exception")); + + response = performRequestWithRemoteSearchUser(esqlAsyncStopRequest(asyncId)); + assertOK(response); + responseAsMap = entityAsMap(response); + assertThat(responseAsMap.get("is_running"), equalTo(false)); + + // Clean up + response = performRequestWithRemoteSearchUser(esqlAsyncDeleteRequest(asyncId)); + assertOK(response); + } + protected Request esqlRequest(String command) throws IOException { + XContentBuilder body = getBody(command, null); + Request request = new Request("POST", "_query"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); + return request; + } + + protected Request esqlRequestAsync(String command) throws IOException { + XContentBuilder body = getBody(command, Map.of("wait_for_completion_timeout", "1ms")); + Request request = new Request("POST", "_query/async"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); + return request; + } + + protected Request esqlAsyncGetRequest(String asyncID) { + Request request = new Request("GET", "_query/async/" + asyncID); + request.addParameter("wait_for_completion_timeout", "1ms"); + return request; + } + + protected Request esqlAsyncStopRequest(String asyncID) { + Request request = new Request("POST", "_query/async/" + asyncID + "/stop"); + return request; + } + + protected Request esqlAsyncDeleteRequest(String asyncID) { + Request request = new Request("DELETE", "_query/async/" + asyncID); + return request; + } + + private static XContentBuilder getBody(String command, @Nullable Map extraParams) throws IOException { XContentBuilder body = JsonXContent.contentBuilder(); body.startObject(); 
body.field("query", command); @@ -1224,10 +1347,17 @@ protected Request esqlRequest(String command) throws IOException { body.endObject(); } } + if (extraParams != null) { + extraParams.forEach((name, value) -> { + try { + body.field(name, value); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } body.endObject(); - Request request = new Request("POST", "_query"); - request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); - return request; + return body; } private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { @@ -1237,6 +1367,11 @@ private Response performRequestWithRemoteSearchUser(final Request request) throw return client().performRequest(request); } + private Response performRequestWithUser(final Request request, String user) throws IOException { + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(user, PASS))); + return client().performRequest(request); + } + private Response performRequestWithRemoteSearchUserViaAPIKey(Request request, String encodedApiKey) throws IOException { request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encodedApiKey)); return client().performRequest(request); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 417d66019aae0..5a2d24e1aa3ce 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -567,6 +567,7 @@ public class Constants { "indices:data/read/eql/async/get", "indices:data/read/esql", 
"indices:data/read/esql/async/get", + "indices:data/read/esql/async/stop", "indices:data/read/esql/resolve_fields", "indices:data/read/esql/search_shards", "indices:data/read/explain", diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java index f051289d6d7cf..234aeeeb6e82e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentAndFieldLevelSecurityTests.java @@ -119,9 +119,10 @@ public void testSimpleQuery() { response -> { assertHitCount(response, 1); assertSearchHits(response, "1"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("id").toString(), equalTo("1")); } ); @@ -131,9 +132,10 @@ public void testSimpleQuery() { response -> { assertHitCount(response, 1); assertSearchHits(response, "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id").toString(), equalTo("2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("id").toString(), 
equalTo("2")); } ); @@ -197,8 +199,9 @@ public void testDLSIsAppliedBeforeFLS() { response -> { assertHitCount(response, 1); assertSearchHits(response, "2"); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field1").toString(), equalTo("value2")); } ); @@ -228,9 +231,10 @@ public void testQueryCache() { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1"), equalTo("value1")); + assertThat(source.get("id"), equalTo("1")); } ); assertResponse( @@ -239,9 +243,10 @@ public void testQueryCache() { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field2"), equalTo("value2")); + assertThat(source.get("id"), equalTo("2")); } ); @@ -254,8 +259,9 @@ public void testQueryCache() { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), 
equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("id"), equalTo("2")); } ); @@ -267,13 +273,15 @@ public void testQueryCache() { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("id"), equalTo("1")); + Map source0 = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), equalTo(2)); + assertThat(source0.get("field1"), equalTo("value1")); + assertThat(source0.get("id"), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("field2"), equalTo("value2")); - assertThat(response.getHits().getAt(1).getSourceAsMap().get("id"), equalTo("2")); + Map source1 = response.getHits().getAt(1).getSourceAsMap(); + assertThat(source1.size(), equalTo(2)); + assertThat(source1.get("field2"), equalTo("value2")); + assertThat(source1.get("id"), equalTo("2")); } ); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index 12b75c787d6e9..77594e3ae0b13 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -475,15 +475,17 @@ public void testMSearch() throws Exception { 
response -> { assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("id"), is(1)); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[1].isFailure()); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("id"), is(1)); } ); } @@ -496,15 +498,17 @@ public void testMSearch() throws Exception { response -> { assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); + 
Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("id"), is(2)); } ); } @@ -523,21 +527,25 @@ public void testMSearch() throws Exception { response -> { assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), 
is("value1")); + assertThat(source0.get("id"), is(1)); + source0 = response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap(); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("id"), is(1)); + source1 = response.getResponses()[1].getResponse().getHits().getAt(1).getSourceAsMap(); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("id"), is(2)); } ); } @@ -1266,8 +1274,9 @@ public void testScroll() throws Exception { do { assertNoFailures(response); assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + 
assertThat(source.get("field1"), is("value1")); if (response.getScrollId() == null) { break; @@ -1326,8 +1335,9 @@ public void testReaderId() throws Exception { .get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); } } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(response.pointInTimeId())).actionGet(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 6c7ba15b773ba..fadabb4e8fcb3 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -955,13 +955,15 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - 
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source0.size(), is(1)); + assertThat(source0.get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source1.size(), is(1)); + assertThat(source1.get("field1"), is("value1")); } ); } @@ -974,13 +976,15 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source0.size(), is(1)); + assertThat(source0.get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source1.size(), is(1)); + assertThat(source1.get("field2"), is("value2")); } ); } @@ -992,15 +996,17 @@ public void testMSearchApi() throws Exception { 
.add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); } ); } @@ -1016,7 +1022,7 @@ public void testMSearchApi() throws Exception { assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - 
assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); + assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); } @@ -1028,17 +1034,19 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source0.size(), is(3)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("field3"), is("value3")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + 
assertThat(source1.size(), is(3)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("field3"), is("value3")); } ); } @@ -1050,17 +1058,19 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source0.size(), is(3)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("field3"), is("value3")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + 
assertThat(source1.size(), is(3)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("field3"), is("value3")); } ); } @@ -1072,17 +1082,19 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source0.size(), is(3)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); + assertThat(source0.get("field3"), is("value3")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + 
assertThat(source1.size(), is(3)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); + assertThat(source1.get("field3"), is("value3")); } ); } @@ -1094,15 +1106,17 @@ public void testMSearchApi() throws Exception { .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { + Map source0 = response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap(); + Map source1 = response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source0.size(), is(2)); + assertThat(source0.get("field1"), is("value1")); + assertThat(source0.get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); + assertThat(source1.size(), is(2)); + assertThat(source1.get("field1"), is("value1")); + assertThat(source1.get("field2"), is("value2")); } ); } @@ -1134,8 +1148,9 @@ public void testScroll() throws Exception { do { assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); 
assertThat(response.getHits().getHits().length, is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); if (response.getScrollId() == null) { break; @@ -1191,10 +1206,11 @@ public void testPointInTimeId() throws Exception { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); } ); } @@ -1221,9 +1237,10 @@ public void testQueryCache() throws Exception { .prepareSearch("test") .setQuery(constantScoreQuery(termQuery("field1", "value1"))), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); } ); assertHitCountAndNoFailures( @@ -1238,11 +1255,12 @@ public void testQueryCache() throws Exception { Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(multipleFieldsUser, USERS_PASSWD)) ).prepareSearch("test").setQuery(constantScoreQuery(termQuery("field1", "value1"))), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), 
is(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); + assertThat(source.size(), is(3)); + assertThat(source.get("field1"), is("value1")); + assertThat(source.get("field2"), is("value2")); + assertThat(source.get("field3"), is("value3")); } ); } @@ -1311,8 +1329,9 @@ public void testScrollWithQueryCache() { .get(); assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = user1SearchResponse.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); scrolledDocsUser1++; } else { user1SearchResponse.decRef(); @@ -1322,8 +1341,9 @@ public void testScrollWithQueryCache() { assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); - assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + Map source = user1SearchResponse.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), is(1)); + assertThat(source.get("field1"), is("value1")); scrolledDocsUser1++; } else { assertThat(user1SearchResponse.getHits().getHits().length, is(0)); @@ -1555,8 +1575,9 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) .prepareSearch("test"), response -> 
{ - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field1").toString(), equalTo("value1")); } ); @@ -1565,8 +1586,9 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); @@ -1575,9 +1597,10 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); @@ -1593,10 +1616,11 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - 
assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("field3").toString(), equalTo("value3")); } ); @@ -1605,10 +1629,11 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("field3").toString(), equalTo("value3")); } ); @@ -1617,10 +1642,11 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(3)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - 
assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field3").toString(), equalTo("value3")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(3)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); + assertThat(source.get("field3").toString(), equalTo("value3")); } ); @@ -1629,9 +1655,10 @@ public void testSource() throws Exception { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) .prepareSearch("test"), response -> { - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); } @@ -2127,9 +2154,10 @@ public void testQuery_withRoleWithFieldWildcards() { .setQuery(matchQuery("field1", "value1")), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); @@ -2138,10 +2166,11 @@ public void 
testQuery_withRoleWithFieldWildcards() { .prepareSearch("test") .setQuery(matchQuery("field2", "value2")), response -> { + Map source = response.getHits().getAt(0).getSourceAsMap(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(2)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1").toString(), equalTo("value1")); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("field2").toString(), equalTo("value2")); + assertThat(source.size(), equalTo(2)); + assertThat(source.get("field1").toString(), equalTo("value1")); + assertThat(source.get("field2").toString(), equalTo("value2")); } ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 4f7ba7808b823..eabd2e2556f89 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -1769,7 +1769,7 @@ private void searchActiveTokens( client, request, new ContextPreservingActionListener<>(supplier, listener), - (SearchHit hit) -> filterAndParseHit(hit, filter) + (SearchHit hit) -> filterAndParseHit(hit.getSourceAsMap(), filter) ); }, listener::onFailure)); } @@ -1913,9 +1913,8 @@ private static Predicate> isOfUser(String username) { }; } - private static Tuple filterAndParseHit(SearchHit hit, @Nullable Predicate> filter) + private static Tuple filterAndParseHit(Map source, @Nullable Predicate> filter) throws IllegalStateException, DateTimeException { - final Map source = hit.getSourceAsMap(); if (source == null) { throw new IllegalStateException("token document did not have source but source should have been fetched"); } @@ -2737,7 +2736,6 @@ static RefreshTokenStatus fromSourceMap(Map refreshTokenSource) } record Doc(String id, Map 
sourceAsMap, long seqNo, long primaryTerm) { - Doc(SearchHit searchHit) { this(searchHit.getId(), searchHit.getSourceAsMap(), searchHit.getSeqNo(), searchHit.getPrimaryTerm()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 614401770cfb7..b9270b6035680 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -1030,6 +1030,7 @@ private static boolean isAsyncRelatedAction(String action) { || action.equals(TransportDeleteAsyncResultAction.TYPE.name()) || action.equals(EqlAsyncActionNames.EQL_ASYNC_GET_RESULT_ACTION_NAME) || action.equals(EsqlAsyncActionNames.ESQL_ASYNC_GET_RESULT_ACTION_NAME) + || action.equals(EsqlAsyncActionNames.ESQL_ASYNC_STOP_ACTION_NAME) || action.equals(SqlAsyncActionNames.SQL_ASYNC_GET_RESULT_ACTION_NAME); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml index 7ade369893f4b..df85362df5fa8 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml @@ -1,4 +1,4 @@ -setup: +"Source values are mutated as expected": - requires: cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] reason: "Feature implemented" @@ -14,7 +14,6 @@ setup: properties: events: type: counted_keyword - synthetic_source_keep: none - do: @@ -53,71 +52,525 @@ setup: id: "6" body: { "events": [null, null]} + - do: + index: + index: test-events + id: "7" + body: { "events": [["a", "b"], "a", ["c"], [["b"], "c"]]} + - do: 
indices.refresh: { } ---- -"Source values are mutated as expected": - - do: - search: - index: test-events - body: - query: - ids: - values: [1] - - match: - hits.hits.0._source: - events: ["a", "a", "b", "c"] + - do: + search: + index: test-events + body: + query: + ids: + values: [1] + - match: + hits.hits.0._source: + events: ["a", "a", "b", "c"] - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [2] - - match: + - match: hits.hits.0._source: events: ["a", "b", "b", "b", "c"] - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [3] - - match: + - match: hits.hits.0._source: events: ["a", "b", "c", "c"] - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [4] - - match: + - match: hits.hits.0._source: events: "a" - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [5] - - match: + - match: hits.hits.0._source: {} - - do: - search: - index: test-events - body: + - do: + search: + index: test-events + body: query: ids: values: [6] - - match: + - match: hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [7] + - match: + hits.hits.0._source: + events: ["a", "a", "b", "b", "c", "c"] + +--- + +"synthetic_source_keep value is respected": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + events: + type: counted_keyword + synthetic_source_keep: all + + - do: + index: + index: test-events + id: "1" + body: { "events": [ "a", "b", "a", "c" ] } + + - do: + index: + index: test-events + id: "2" + body: { "events": [ "b", "b", "c", "a", "b" ] } + + - do: + index: + index: 
test-events + id: "3" + body: { "events": [ "c", "a", null, "b", null, "c" ] } + + - do: + index: + index: test-events + id: "4" + body: { "events": [ "a" ] } + + - do: + index: + index: test-events + id: "5" + body: { "events": [ ] } + + - do: + index: + index: test-events + id: "6" + body: { "events": [ null, null ] } + + - do: + index: + index: test-events + id: "7" + body: { "events": [["a", "b"], "a", ["c"], [["b"], "c"]]} + + - do: + indices.refresh: { } + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + events: [ "a", "b", "a", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + events: [ "b", "b", "c", "a", "b" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + events: [ "c", "a", null, "b", null, "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + events: [ "a" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: + events: [ ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 6 ] + - match: + hits.hits.0._source: + events: [ null, null ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 7 ] + - match: + hits.hits.0._source: + events: [["a", "b"], "a", ["c"], [["b"], "c"]] + +--- + +"synthetic_source_keep value is not inherited": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + event-object: + type: object + synthetic_source_keep: arrays + properties: + event-object-2: + type: object + properties: + events: + type: counted_keyword + - do: + index: + index: 
test-events + id: "1" + body: { "event-object": { "event-object-2": { "events": [ "a", "b", "a", "c" ] } } } + + - do: + index: + index: test-events + id: "2" + body: { "event-object": { "event-object-2": { "events": [ "b", "b", "c", "a", "b" ] } } } + + - do: + index: + index: test-events + id: "3" + body: { "event-object": { "event-object-2": { "events": [ "c", "a", null, "b", null, "c" ] } } } + + - do: + index: + index: test-events + id: "4" + body: { "event-object": { "event-object-2": { "events": [ "a" ] } } } + + - do: + index: + index: test-events + id: "5" + body: { "event-object": { "event-object-2": { "events": [ ] } } } + + - do: + index: + index: test-events + id: "6" + body: { "event-object": { "event-object-2": { "events": [ null, null ] } } } + + - do: + index: + index: test-events + id: "7" + body: { "event-object": { "event-object-2": { "events": [["a", "b"], "a", ["c"], [["b"], "c"]] } } } + + - do: + indices.refresh: { } + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "a", "b", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "b", "b", "b", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "b", "c", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + event-object: + event-object-2: + events: "a" + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 6 ] + - match: + hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 7 ] + - match: + 
hits.hits.0._source: + event-object: + event-object-2: + events: [ "a", "a", "b", "b", "c", "c" ] + +--- + +"Index-level synthetic_source_keep value is respected": + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mapping.synthetic_source_keep: arrays + mappings: + properties: + events: + type: counted_keyword + + - do: + index: + index: test-events + id: "1" + body: { "events": [ "a", "b", "a", "c" ] } + + - do: + index: + index: test-events + id: "2" + body: { "events": [ "b", "b", "c", "a", "b" ] } + + - do: + index: + index: test-events + id: "3" + body: { "events": [ "c", "a", null, "b", null, "c" ] } + + - do: + index: + index: test-events + id: "4" + body: { "events": [ "a" ] } + + - do: + index: + index: test-events + id: "5" + body: { "events": [ ] } + + - do: + index: + index: test-events + id: "6" + body: { "events": [ null, null ] } + + - do: + index: + index: test-events + id: "7" + body: { "events": [ [ "a", "b" ], "a", [ "c" ], [ [ "b" ], "c" ] ] } + + - do: + indices.refresh: { } + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 1 ] + - match: + hits.hits.0._source: + events: [ "a", "b", "a", "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 2 ] + - match: + hits.hits.0._source: + events: [ "b", "b", "c", "a", "b" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 3 ] + - match: + hits.hits.0._source: + events: [ "c", "a", null, "b", null, "c" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 4 ] + - match: + hits.hits.0._source: + events: [ "a" ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 5 ] + - match: + hits.hits.0._source: + events: [ ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 
6 ] + - match: + hits.hits.0._source: + events: [ null, null ] + + - do: + search: + index: test-events + body: + query: + ids: + values: [ 7 ] + - match: + hits.hits.0._source: + events: [["a", "b"], "a", ["c"], [["b"], "c"]] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml index 1b532ab80eeb6..7d1b3a90c6081 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/191_lookup_join_text.yml @@ -6,7 +6,7 @@ setup: - method: POST path: /_query parameters: [] - capabilities: [lookup_join_text] + capabilities: [lookup_join_text, join_lookup_v11] reason: "uses LOOKUP JOIN" - do: indices.create: @@ -31,7 +31,6 @@ setup: settings: index: mode: lookup - number_of_shards: 1 mappings: properties: color: diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java index 8a66ceeed0394..2b244fef515d6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java @@ -10,8 +10,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -93,11 +91,7 @@ private static Map convertBucketToDocument( ); } - // We don't use #getSourceAsMap here 
because we don't want to cache the object as we - // only need it here. More over we are modifying the map of maps so we will be holding - // the wrong map. - BytesReference bytes = topHits.getHits().getHits()[0].getSourceRef(); - Map document = XContentHelper.convertToMap(bytes, true).v2(); + Map document = topHits.getHits().getHits()[0].getSourceAsMap(); // generator to create unique but deterministic document ids, so we // - do not create duplicates if we re-run after failure diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java index f3648580691cb..9844b1eac6d43 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import java.util.Arrays; +import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; @@ -56,14 +57,15 @@ public void testHistoryOnRejection() throws Exception { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(2L)); + assertThat( "Did not find watcher history for rejected watch", - Arrays.stream(searchResponse.getHits().getHits()) - .anyMatch( - hit -> hit.getSourceAsMap() != null - && hit.getSourceAsMap().get("messages") != null - && hit.getSourceAsMap().get("messages").toString().contains("due to thread pool capacity") - ), + 
Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> { + Map source = hit.getSourceAsMap(); + return source != null + && source.get("messages") != null + && source.get("messages").toString().contains("due to thread pool capacity"); + }), equalTo(true) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 2ec6541275d04..19d5bfa8ca678 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -140,14 +140,16 @@ public void testScriptTransform() throws Exception { assertNoFailuresAndResponse(prepareSearch("output1"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key3").toString(), equalTo("20")); }); } @@ -224,14 +226,16 @@ public void testChainTransform() throws Exception { 
assertNoFailuresAndResponse(prepareSearch("output1"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); + Map source = response.getHits().getAt(0).getSourceAsMap(); + assertThat(source.size(), equalTo(1)); + assertThat(source.get("key4").toString(), equalTo("30")); }); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 8b8d8caacdd96..337fc00cc7caf 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -113,7 +113,8 @@ public static void main(String[] args) throws Exception { internalNodeEnv, PluginsLoader.createPluginsLoader( PluginsLoader.loadModulesBundles(internalNodeEnv.modulesFile()), - PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsFile()) + PluginsLoader.loadPluginsBundles(internalNodeEnv.pluginsFile()), + Map.of() ) ).start() ) {